diff --git a/librarian.yaml b/librarian.yaml index 055b3cd3d8ce..8aa2166518e7 100644 --- a/librarian.yaml +++ b/librarian.yaml @@ -16,8 +16,8 @@ version: v0.11.0 repo: googleapis/google-cloud-python sources: googleapis: - commit: 4ad1b6750926701f94ae8a88525395fd17b42cfe - sha256: 4a54e2829977dea29fb0dde856b182a009873c9f1296e4df3ccb35c62511137b + commit: 8bd905897f61fb6f2e7d8b7cb3e2ca41d0cbc9c8 + sha256: bad85ba8a113ccb3dbe9bf54b80f25068a76a2b9c7d59b6af98a524f40712e69 release: ignored_changes: - .repo-metadata.json diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py index bf9e916a7bb6..4f7b3afba6a7 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/__init__.py @@ -43,6 +43,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetMetadataRequest, GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, @@ -52,6 +53,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + Metadata, PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, @@ -62,8 +64,8 @@ ReportTaskMetadata, RunFunnelReportRequest, RunFunnelReportResponse, - SheetExportAudienceListRequest, - SheetExportAudienceListResponse, + RunReportRequest, + RunReportResponse, WebhookNotification, ) from .types.data import ( @@ -72,10 +74,15 @@ CohortReportSettings, CohortSpec, CohortsRange, + Comparison, + ComparisonMetadata, + ConversionMetadata, + ConversionSpec, DateRange, Dimension, DimensionExpression, DimensionHeader, + DimensionMetadata, DimensionValue, EmptyFilter, EventCriteriaScoping, @@ -104,6 +111,7 @@ Metric, MetricAggregation, MetricHeader, + MetricMetadata, MetricType, MetricValue, NumericFilter, @@ -116,6 +124,7 @@ Row, SamplingLevel, SamplingMetadata, + Section, 
Segment, SegmentEventFilter, SegmentFilter, @@ -250,6 +259,10 @@ def _get_version(dependency_name): "CohortReportSettings", "CohortSpec", "CohortsRange", + "Comparison", + "ComparisonMetadata", + "ConversionMetadata", + "ConversionSpec", "CreateAudienceListRequest", "CreateRecurringAudienceListRequest", "CreateReportTaskRequest", @@ -257,6 +270,7 @@ def _get_version(dependency_name): "Dimension", "DimensionExpression", "DimensionHeader", + "DimensionMetadata", "DimensionValue", "EmptyFilter", "EventCriteriaScoping", @@ -282,6 +296,7 @@ def _get_version(dependency_name): "FunnelStep", "FunnelSubReport", "GetAudienceListRequest", + "GetMetadataRequest", "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", @@ -292,9 +307,11 @@ def _get_version(dependency_name): "ListRecurringAudienceListsResponse", "ListReportTasksRequest", "ListReportTasksResponse", + "Metadata", "Metric", "MetricAggregation", "MetricHeader", + "MetricMetadata", "MetricType", "MetricValue", "NumericFilter", @@ -315,8 +332,11 @@ def _get_version(dependency_name): "Row", "RunFunnelReportRequest", "RunFunnelReportResponse", + "RunReportRequest", + "RunReportResponse", "SamplingLevel", "SamplingMetadata", + "Section", "Segment", "SegmentEventFilter", "SegmentFilter", @@ -333,8 +353,6 @@ def _get_version(dependency_name): "SessionSegmentConditionGroup", "SessionSegmentCriteria", "SessionSegmentExclusion", - "SheetExportAudienceListRequest", - "SheetExportAudienceListResponse", "StringFilter", "UserCriteriaScoping", "UserExclusionDuration", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json index bb6c0b6f462d..5789ccfbbdd6 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/gapic_metadata.json @@ -30,6 +30,11 @@ "get_audience_list" ] 
}, + "GetMetadata": { + "methods": [ + "get_metadata" + ] + }, "GetPropertyQuotasSnapshot": { "methods": [ "get_property_quotas_snapshot" @@ -75,9 +80,9 @@ "run_funnel_report" ] }, - "SheetExportAudienceList": { + "RunReport": { "methods": [ - "sheet_export_audience_list" + "run_report" ] } } @@ -105,6 +110,11 @@ "get_audience_list" ] }, + "GetMetadata": { + "methods": [ + "get_metadata" + ] + }, "GetPropertyQuotasSnapshot": { "methods": [ "get_property_quotas_snapshot" @@ -150,9 +160,9 @@ "run_funnel_report" ] }, - "SheetExportAudienceList": { + "RunReport": { "methods": [ - "sheet_export_audience_list" + "run_report" ] } } @@ -180,6 +190,11 @@ "get_audience_list" ] }, + "GetMetadata": { + "methods": [ + "get_metadata" + ] + }, "GetPropertyQuotasSnapshot": { "methods": [ "get_property_quotas_snapshot" @@ -225,9 +240,9 @@ "run_funnel_report" ] }, - "SheetExportAudienceList": { + "RunReport": { "methods": [ - "sheet_export_audience_list" + "run_report" ] } } diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py index ea738ae51608..b8f064f33398 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/async_client.py @@ -82,6 +82,8 @@ class AlphaAnalyticsDataAsyncClient: parse_audience_list_path = staticmethod( AlphaAnalyticsDataClient.parse_audience_list_path ) + metadata_path = staticmethod(AlphaAnalyticsDataClient.metadata_path) + parse_metadata_path = staticmethod(AlphaAnalyticsDataClient.parse_metadata_path) property_quotas_snapshot_path = staticmethod( AlphaAnalyticsDataClient.property_quotas_snapshot_path ) @@ -724,139 +726,6 @@ async def sample_query_audience_list(): # Done; return the response. 
return response - async def sheet_export_audience_list( - self, - request: Optional[ - Union[analytics_data_api.SheetExportAudienceListRequest, dict] - ] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analytics_data_api.SheetExportAudienceListResponse: - r"""Exports an audience list of users to a Google Sheet. After - creating an audience, the users are not immediately available - for listing. First, a request to ``CreateAudienceList`` is - necessary to create an audience list of users, and then second, - this method is used to export those users in the audience list - to a Google Sheet. - - See `Creating an Audience - List `__ - for an introduction to Audience Lists with examples. - - Audiences in Google Analytics 4 allow you to segment your users - in the ways that are important to your business. To learn more, - see https://support.google.com/analytics/answer/9267572. - - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google - Analytics Audience Export API - Feedback `__ form. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. 
- # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.analytics import data_v1alpha - - async def sample_sheet_export_audience_list(): - # Create a client - client = data_v1alpha.AlphaAnalyticsDataAsyncClient() - - # Initialize request argument(s) - request = data_v1alpha.SheetExportAudienceListRequest( - name="name_value", - ) - - # Make the request - response = await client.sheet_export_audience_list(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.analytics.data_v1alpha.types.SheetExportAudienceListRequest, dict]]): - The request object. A request to export users in an - audience list to a Google Sheet. - name (:class:`str`): - Required. The name of the audience list to retrieve - users from. Format: - ``properties/{property}/audienceLists/{audience_list}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.analytics.data_v1alpha.types.SheetExportAudienceListResponse: - The created Google Sheet with the - list of users in an audience list. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analytics_data_api.SheetExportAudienceListRequest): - request = analytics_data_api.SheetExportAudienceListRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._client._transport._wrapped_methods[ - self._client._transport.sheet_export_audience_list - ] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._client._validate_universe_domain() - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - async def get_audience_list( self, request: Optional[ @@ -2178,6 +2047,231 @@ async def sample_list_report_tasks(): # Done; return the response. return response + async def run_report( + self, + request: Optional[Union[analytics_data_api.RunReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analytics_data_api.RunReportResponse: + r"""Returns a customized report of your Google Analytics + event data. 
Reports contain statistics derived from data + collected by the Google Analytics tracking code. The + data returned from the API is as a table with columns + for the requested dimensions and metrics. Metrics are + individual measurements of user activity on your + property, such as active users or event count. + Dimensions break down metrics across some common + criteria, such as country or event name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_run_report(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.RunReportRequest( + property="property_value", + ) + + # Make the request + response = await client.run_report(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.RunReportRequest, dict]]): + The request object. The request to generate a report. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.analytics.data_v1alpha.types.RunReportResponse: + The response report table + corresponding to a request. + + """ + # Create or coerce a protobuf request object. 
+ # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.RunReportRequest): + request = analytics_data_api.RunReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.run_report + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_metadata( + self, + request: Optional[Union[analytics_data_api.GetMetadataRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analytics_data_api.Metadata: + r"""Returns metadata for dimensions and metrics available in + reporting methods. Used to explore the dimensions and metrics. + In this method, a Google Analytics property identifier is + specified in the request, and the metadata response includes + Custom dimensions and metrics as well as Universal metadata. + + For example if a custom metric with parameter name + ``levels_unlocked`` is registered to a property, the Metadata + response will contain ``customEvent:levels_unlocked``. Universal + metadata are dimensions and metrics applicable to any property + such as ``country`` and ``totalUsers``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + async def sample_get_metadata(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.GetMetadataRequest( + name="name_value", + ) + + # Make the request + response = await client.get_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.analytics.data_v1alpha.types.GetMetadataRequest, dict]]): + The request object. Request for a property's dimension + and metric metadata. + name (:class:`str`): + Required. The resource name of the metadata to retrieve. + This name field is specified in the URL path and not URL + parameters. Property is a numeric Google Analytics + property identifier. To learn more, see `where to find + your Property + ID `__. + + Example: properties/1234/metadata + + Set the Property ID to 0 for dimensions and metrics + common to all properties. In this special mode, this + method will not return custom dimensions and metrics. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.analytics.data_v1alpha.types.Metadata: + The dimensions, metrics and + comparisons currently accepted in + reporting methods. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetMetadataRequest): + request = analytics_data_api.GetMetadataRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_metadata + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "AlphaAnalyticsDataAsyncClient": return self diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py index e5a6370766a7..f0b41ca7880f 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/client.py @@ -250,6 +250,21 @@ def parse_audience_list_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def metadata_path( + property: str, + ) -> str: + """Returns a fully-qualified metadata string.""" + return "properties/{property}/metadata".format( + property=property, + ) + + @staticmethod + def parse_metadata_path(path: str) -> Dict[str, str]: + """Parses a metadata path into its component segments.""" + m = re.match(r"^properties/(?P.+?)/metadata$", path) + return m.groupdict() if m else {} + @staticmethod def property_quotas_snapshot_path( property: str, @@ -1182,138 +1197,6 @@ def sample_query_audience_list(): # Done; return the response. return response - def sheet_export_audience_list( - self, - request: Optional[ - Union[analytics_data_api.SheetExportAudienceListRequest, dict] - ] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analytics_data_api.SheetExportAudienceListResponse: - r"""Exports an audience list of users to a Google Sheet. After - creating an audience, the users are not immediately available - for listing. 
First, a request to ``CreateAudienceList`` is - necessary to create an audience list of users, and then second, - this method is used to export those users in the audience list - to a Google Sheet. - - See `Creating an Audience - List `__ - for an introduction to Audience Lists with examples. - - Audiences in Google Analytics 4 allow you to segment your users - in the ways that are important to your business. To learn more, - see https://support.google.com/analytics/answer/9267572. - - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google - Analytics Audience Export API - Feedback `__ form. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.analytics import data_v1alpha - - def sample_sheet_export_audience_list(): - # Create a client - client = data_v1alpha.AlphaAnalyticsDataClient() - - # Initialize request argument(s) - request = data_v1alpha.SheetExportAudienceListRequest( - name="name_value", - ) - - # Make the request - response = client.sheet_export_audience_list(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.analytics.data_v1alpha.types.SheetExportAudienceListRequest, dict]): - The request object. A request to export users in an - audience list to a Google Sheet. - name (str): - Required. The name of the audience list to retrieve - users from. 
Format: - ``properties/{property}/audienceLists/{audience_list}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. - - Returns: - google.analytics.data_v1alpha.types.SheetExportAudienceListResponse: - The created Google Sheet with the - list of users in an audience list. - - """ - # Create or coerce a protobuf request object. - # - Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - flattened_params = [name] - has_flattened_params = ( - len([param for param in flattened_params if param is not None]) > 0 - ) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # - Use the request object if provided (there's no risk of modifying the input as - # there are no flattened fields), or create one. - if not isinstance(request, analytics_data_api.SheetExportAudienceListRequest): - request = analytics_data_api.SheetExportAudienceListRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[ - self._transport.sheet_export_audience_list - ] - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Validate the universe domain. - self._validate_universe_domain() - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - def get_audience_list( self, request: Optional[ @@ -2613,6 +2496,226 @@ def sample_list_report_tasks(): # Done; return the response. return response + def run_report( + self, + request: Optional[Union[analytics_data_api.RunReportRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analytics_data_api.RunReportResponse: + r"""Returns a customized report of your Google Analytics + event data. Reports contain statistics derived from data + collected by the Google Analytics tracking code. The + data returned from the API is as a table with columns + for the requested dimensions and metrics. Metrics are + individual measurements of user activity on your + property, such as active users or event count. + Dimensions break down metrics across some common + criteria, such as country or event name. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_run_report(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.RunReportRequest( + property="property_value", + ) + + # Make the request + response = client.run_report(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.RunReportRequest, dict]): + The request object. The request to generate a report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.analytics.data_v1alpha.types.RunReportResponse: + The response report table + corresponding to a request. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.RunReportRequest): + request = analytics_data_api.RunReportRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.run_report] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("property", request.property),)), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_metadata( + self, + request: Optional[Union[analytics_data_api.GetMetadataRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analytics_data_api.Metadata: + r"""Returns metadata for dimensions and metrics available in + reporting methods. Used to explore the dimensions and metrics. + In this method, a Google Analytics property identifier is + specified in the request, and the metadata response includes + Custom dimensions and metrics as well as Universal metadata. + + For example if a custom metric with parameter name + ``levels_unlocked`` is registered to a property, the Metadata + response will contain ``customEvent:levels_unlocked``. Universal + metadata are dimensions and metrics applicable to any property + such as ``country`` and ``totalUsers``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.analytics import data_v1alpha + + def sample_get_metadata(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.GetMetadataRequest( + name="name_value", + ) + + # Make the request + response = client.get_metadata(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.analytics.data_v1alpha.types.GetMetadataRequest, dict]): + The request object. Request for a property's dimension + and metric metadata. + name (str): + Required. The resource name of the metadata to retrieve. + This name field is specified in the URL path and not URL + parameters. Property is a numeric Google Analytics + property identifier. To learn more, see `where to find + your Property + ID `__. + + Example: properties/1234/metadata + + Set the Property ID to 0 for dimensions and metrics + common to all properties. In this special mode, this + method will not return custom dimensions and metrics. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.analytics.data_v1alpha.types.Metadata: + The dimensions, metrics and + comparisons currently accepted in + reporting methods. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_data_api.GetMetadataRequest): + request = analytics_data_api.GetMetadataRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_metadata] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AlphaAnalyticsDataClient": return self diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py index 1af5bcd61c19..0c1607a7bb7b 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/base.py @@ -43,9 +43,6 @@ class AlphaAnalyticsDataTransport(abc.ABC): AUTH_SCOPES = ( "https://www.googleapis.com/auth/analytics", "https://www.googleapis.com/auth/analytics.readonly", - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/drive.file", - "https://www.googleapis.com/auth/spreadsheets", ) DEFAULT_HOST: str = "analyticsdata.googleapis.com" @@ -164,11 +161,6 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - self.sheet_export_audience_list: gapic_v1.method.wrap_method( - self.sheet_export_audience_list, - default_timeout=None, - client_info=client_info, - ), self.get_audience_list: gapic_v1.method.wrap_method( self.get_audience_list, default_timeout=None, @@ -219,6 +211,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.run_report: gapic_v1.method.wrap_method( + self.run_report, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata: gapic_v1.method.wrap_method( + self.get_metadata, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -268,18 +270,6 @@ def query_audience_list( ]: raise NotImplementedError() - @property - def sheet_export_audience_list( - self, - ) -> Callable[ - [analytics_data_api.SheetExportAudienceListRequest], - Union[ - analytics_data_api.SheetExportAudienceListResponse, - 
Awaitable[analytics_data_api.SheetExportAudienceListResponse], - ], - ]: - raise NotImplementedError() - @property def get_audience_list( self, @@ -393,6 +383,27 @@ def list_report_tasks( ]: raise NotImplementedError() + @property + def run_report( + self, + ) -> Callable[ + [analytics_data_api.RunReportRequest], + Union[ + analytics_data_api.RunReportResponse, + Awaitable[analytics_data_api.RunReportResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_metadata( + self, + ) -> Callable[ + [analytics_data_api.GetMetadataRequest], + Union[analytics_data_api.Metadata, Awaitable[analytics_data_api.Metadata]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py index fea629dfd74f..9b341d29075c 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc.py @@ -489,56 +489,6 @@ def query_audience_list( ) return self._stubs["query_audience_list"] - @property - def sheet_export_audience_list( - self, - ) -> Callable[ - [analytics_data_api.SheetExportAudienceListRequest], - analytics_data_api.SheetExportAudienceListResponse, - ]: - r"""Return a callable for the sheet export audience list method over gRPC. - - Exports an audience list of users to a Google Sheet. After - creating an audience, the users are not immediately available - for listing. First, a request to ``CreateAudienceList`` is - necessary to create an audience list of users, and then second, - this method is used to export those users in the audience list - to a Google Sheet. 
- - See `Creating an Audience - List `__ - for an introduction to Audience Lists with examples. - - Audiences in Google Analytics 4 allow you to segment your users - in the ways that are important to your business. To learn more, - see https://support.google.com/analytics/answer/9267572. - - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google - Analytics Audience Export API - Feedback `__ form. - - Returns: - Callable[[~.SheetExportAudienceListRequest], - ~.SheetExportAudienceListResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "sheet_export_audience_list" not in self._stubs: - self._stubs["sheet_export_audience_list"] = ( - self._logged_channel.unary_unary( - "/google.analytics.data.v1alpha.AlphaAnalyticsData/SheetExportAudienceList", - request_serializer=analytics_data_api.SheetExportAudienceListRequest.serialize, - response_deserializer=analytics_data_api.SheetExportAudienceListResponse.deserialize, - ) - ) - return self._stubs["sheet_export_audience_list"] - @property def get_audience_list( self, @@ -919,6 +869,78 @@ def list_report_tasks( ) return self._stubs["list_report_tasks"] + @property + def run_report( + self, + ) -> Callable[ + [analytics_data_api.RunReportRequest], analytics_data_api.RunReportResponse + ]: + r"""Return a callable for the run report method over gRPC. + + Returns a customized report of your Google Analytics + event data. Reports contain statistics derived from data + collected by the Google Analytics tracking code. The + data returned from the API is as a table with columns + for the requested dimensions and metrics. 
Metrics are + individual measurements of user activity on your + property, such as active users or event count. + Dimensions break down metrics across some common + criteria, such as country or event name. + + Returns: + Callable[[~.RunReportRequest], + ~.RunReportResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_report" not in self._stubs: + self._stubs["run_report"] = self._logged_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/RunReport", + request_serializer=analytics_data_api.RunReportRequest.serialize, + response_deserializer=analytics_data_api.RunReportResponse.deserialize, + ) + return self._stubs["run_report"] + + @property + def get_metadata( + self, + ) -> Callable[[analytics_data_api.GetMetadataRequest], analytics_data_api.Metadata]: + r"""Return a callable for the get metadata method over gRPC. + + Returns metadata for dimensions and metrics available in + reporting methods. Used to explore the dimensions and metrics. + In this method, a Google Analytics property identifier is + specified in the request, and the metadata response includes + Custom dimensions and metrics as well as Universal metadata. + + For example if a custom metric with parameter name + ``levels_unlocked`` is registered to a property, the Metadata + response will contain ``customEvent:levels_unlocked``. Universal + metadata are dimensions and metrics applicable to any property + such as ``country`` and ``totalUsers``. + + Returns: + Callable[[~.GetMetadataRequest], + ~.Metadata]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_metadata" not in self._stubs: + self._stubs["get_metadata"] = self._logged_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetMetadata", + request_serializer=analytics_data_api.GetMetadataRequest.serialize, + response_deserializer=analytics_data_api.Metadata.deserialize, + ) + return self._stubs["get_metadata"] + def close(self): self._logged_channel.close() diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py index fa488e1556d1..5d31ff395507 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/grpc_asyncio.py @@ -498,56 +498,6 @@ def query_audience_list( ) return self._stubs["query_audience_list"] - @property - def sheet_export_audience_list( - self, - ) -> Callable[ - [analytics_data_api.SheetExportAudienceListRequest], - Awaitable[analytics_data_api.SheetExportAudienceListResponse], - ]: - r"""Return a callable for the sheet export audience list method over gRPC. - - Exports an audience list of users to a Google Sheet. After - creating an audience, the users are not immediately available - for listing. First, a request to ``CreateAudienceList`` is - necessary to create an audience list of users, and then second, - this method is used to export those users in the audience list - to a Google Sheet. - - See `Creating an Audience - List `__ - for an introduction to Audience Lists with examples. - - Audiences in Google Analytics 4 allow you to segment your users - in the ways that are important to your business. 
To learn more, - see https://support.google.com/analytics/answer/9267572. - - This method is introduced at alpha stability with the intention - of gathering feedback on syntax and capabilities before entering - beta. To give your feedback on this API, complete the `Google - Analytics Audience Export API - Feedback `__ form. - - Returns: - Callable[[~.SheetExportAudienceListRequest], - Awaitable[~.SheetExportAudienceListResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "sheet_export_audience_list" not in self._stubs: - self._stubs["sheet_export_audience_list"] = ( - self._logged_channel.unary_unary( - "/google.analytics.data.v1alpha.AlphaAnalyticsData/SheetExportAudienceList", - request_serializer=analytics_data_api.SheetExportAudienceListRequest.serialize, - response_deserializer=analytics_data_api.SheetExportAudienceListResponse.deserialize, - ) - ) - return self._stubs["sheet_export_audience_list"] - @property def get_audience_list( self, @@ -931,6 +881,81 @@ def list_report_tasks( ) return self._stubs["list_report_tasks"] + @property + def run_report( + self, + ) -> Callable[ + [analytics_data_api.RunReportRequest], + Awaitable[analytics_data_api.RunReportResponse], + ]: + r"""Return a callable for the run report method over gRPC. + + Returns a customized report of your Google Analytics + event data. Reports contain statistics derived from data + collected by the Google Analytics tracking code. The + data returned from the API is as a table with columns + for the requested dimensions and metrics. Metrics are + individual measurements of user activity on your + property, such as active users or event count. + Dimensions break down metrics across some common + criteria, such as country or event name. 
+ + Returns: + Callable[[~.RunReportRequest], + Awaitable[~.RunReportResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "run_report" not in self._stubs: + self._stubs["run_report"] = self._logged_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/RunReport", + request_serializer=analytics_data_api.RunReportRequest.serialize, + response_deserializer=analytics_data_api.RunReportResponse.deserialize, + ) + return self._stubs["run_report"] + + @property + def get_metadata( + self, + ) -> Callable[ + [analytics_data_api.GetMetadataRequest], Awaitable[analytics_data_api.Metadata] + ]: + r"""Return a callable for the get metadata method over gRPC. + + Returns metadata for dimensions and metrics available in + reporting methods. Used to explore the dimensions and metrics. + In this method, a Google Analytics property identifier is + specified in the request, and the metadata response includes + Custom dimensions and metrics as well as Universal metadata. + + For example if a custom metric with parameter name + ``levels_unlocked`` is registered to a property, the Metadata + response will contain ``customEvent:levels_unlocked``. Universal + metadata are dimensions and metrics applicable to any property + such as ``country`` and ``totalUsers``. + + Returns: + Callable[[~.GetMetadataRequest], + Awaitable[~.Metadata]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_metadata" not in self._stubs: + self._stubs["get_metadata"] = self._logged_channel.unary_unary( + "/google.analytics.data.v1alpha.AlphaAnalyticsData/GetMetadata", + request_serializer=analytics_data_api.GetMetadataRequest.serialize, + response_deserializer=analytics_data_api.Metadata.deserialize, + ) + return self._stubs["get_metadata"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -949,11 +974,6 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), - self.sheet_export_audience_list: self._wrap_method( - self.sheet_export_audience_list, - default_timeout=None, - client_info=client_info, - ), self.get_audience_list: self._wrap_method( self.get_audience_list, default_timeout=None, @@ -1004,6 +1024,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.run_report: self._wrap_method( + self.run_report, + default_timeout=None, + client_info=client_info, + ), + self.get_metadata: self._wrap_method( + self.get_metadata, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py index 3c95d9779d7f..b39668e21ee3 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest.py @@ -105,6 +105,14 @@ def post_get_audience_list(self, response): logging.log(f"Received response: {response}") return response + def pre_get_metadata(self, request, metadata): + logging.log(f"Received request: {request}") + 
return request, metadata + + def post_get_metadata(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_property_quotas_snapshot(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -177,11 +185,11 @@ def post_run_funnel_report(self, response): logging.log(f"Received response: {response}") return response - def pre_sheet_export_audience_list(self, request, metadata): + def pre_run_report(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata - def post_sheet_export_audience_list(self, response): + def post_run_report(self, response): logging.log(f"Received response: {response}") return response @@ -392,6 +400,54 @@ def post_get_audience_list_with_metadata( """ return response, metadata + def pre_get_metadata( + self, + request: analytics_data_api.GetMetadataRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_data_api.GetMetadataRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_metadata + + Override in a subclass to manipulate the request or metadata + before they are sent to the AlphaAnalyticsData server. + """ + return request, metadata + + def post_get_metadata( + self, response: analytics_data_api.Metadata + ) -> analytics_data_api.Metadata: + """Post-rpc interceptor for get_metadata + + DEPRECATED. Please use the `post_get_metadata_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AlphaAnalyticsData server but before + it is returned to user code. This `post_get_metadata` interceptor runs + before the `post_get_metadata_with_metadata` interceptor. 
+ """ + return response + + def post_get_metadata_with_metadata( + self, + response: analytics_data_api.Metadata, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[analytics_data_api.Metadata, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_metadata + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AlphaAnalyticsData server but before it is returned to user code. + + We recommend only using this `post_get_metadata_with_metadata` + interceptor in new development instead of the `post_get_metadata` interceptor. + When both interceptors are used, this `post_get_metadata_with_metadata` interceptor runs after the + `post_get_metadata` interceptor. The (possibly modified) response returned by + `post_get_metadata` will be passed to + `post_get_metadata_with_metadata`. + """ + return response, metadata + def pre_get_property_quotas_snapshot( self, request: analytics_data_api.GetPropertyQuotasSnapshotRequest, @@ -856,55 +912,53 @@ def post_run_funnel_report_with_metadata( """ return response, metadata - def pre_sheet_export_audience_list( + def pre_run_report( self, - request: analytics_data_api.SheetExportAudienceListRequest, + request: analytics_data_api.RunReportRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - analytics_data_api.SheetExportAudienceListRequest, - Sequence[Tuple[str, Union[str, bytes]]], + analytics_data_api.RunReportRequest, Sequence[Tuple[str, Union[str, bytes]]] ]: - """Pre-rpc interceptor for sheet_export_audience_list + """Pre-rpc interceptor for run_report Override in a subclass to manipulate the request or metadata before they are sent to the AlphaAnalyticsData server. 
""" return request, metadata - def post_sheet_export_audience_list( - self, response: analytics_data_api.SheetExportAudienceListResponse - ) -> analytics_data_api.SheetExportAudienceListResponse: - """Post-rpc interceptor for sheet_export_audience_list + def post_run_report( + self, response: analytics_data_api.RunReportResponse + ) -> analytics_data_api.RunReportResponse: + """Post-rpc interceptor for run_report - DEPRECATED. Please use the `post_sheet_export_audience_list_with_metadata` + DEPRECATED. Please use the `post_run_report_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the AlphaAnalyticsData server but before - it is returned to user code. This `post_sheet_export_audience_list` interceptor runs - before the `post_sheet_export_audience_list_with_metadata` interceptor. + it is returned to user code. This `post_run_report` interceptor runs + before the `post_run_report_with_metadata` interceptor. """ return response - def post_sheet_export_audience_list_with_metadata( + def post_run_report_with_metadata( self, - response: analytics_data_api.SheetExportAudienceListResponse, + response: analytics_data_api.RunReportResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]], ) -> Tuple[ - analytics_data_api.SheetExportAudienceListResponse, - Sequence[Tuple[str, Union[str, bytes]]], + analytics_data_api.RunReportResponse, Sequence[Tuple[str, Union[str, bytes]]] ]: - """Post-rpc interceptor for sheet_export_audience_list + """Post-rpc interceptor for run_report Override in a subclass to read or manipulate the response or metadata after it is returned by the AlphaAnalyticsData server but before it is returned to user code. - We recommend only using this `post_sheet_export_audience_list_with_metadata` - interceptor in new development instead of the `post_sheet_export_audience_list` interceptor. 
- When both interceptors are used, this `post_sheet_export_audience_list_with_metadata` interceptor runs after the - `post_sheet_export_audience_list` interceptor. The (possibly modified) response returned by - `post_sheet_export_audience_list` will be passed to - `post_sheet_export_audience_list_with_metadata`. + We recommend only using this `post_run_report_with_metadata` + interceptor in new development instead of the `post_run_report` interceptor. + When both interceptors are used, this `post_run_report_with_metadata` interceptor runs after the + `post_run_report` interceptor. The (possibly modified) response returned by + `post_run_report` will be passed to + `post_run_report_with_metadata`. """ return response, metadata @@ -1652,6 +1706,153 @@ def __call__( ) return resp + class _GetMetadata( + _BaseAlphaAnalyticsDataRestTransport._BaseGetMetadata, + AlphaAnalyticsDataRestStub, + ): + def __hash__(self): + return hash("AlphaAnalyticsDataRestTransport.GetMetadata") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: analytics_data_api.GetMetadataRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> analytics_data_api.Metadata: + r"""Call the get metadata method over HTTP. + + Args: + request (~.analytics_data_api.GetMetadataRequest): + The request object. Request for a property's dimension + and metric metadata. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.analytics_data_api.Metadata: + The dimensions, metrics and + comparisons currently accepted in + reporting methods. + + """ + + http_options = _BaseAlphaAnalyticsDataRestTransport._BaseGetMetadata._get_http_options() + + request, metadata = self._interceptor.pre_get_metadata(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseGetMetadata._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseGetMetadata._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.analytics.data_v1alpha.AlphaAnalyticsDataClient.GetMetadata", + extra={ + "serviceName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "rpcName": "GetMetadata", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AlphaAnalyticsDataRestTransport._GetMetadata._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the 
appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = analytics_data_api.Metadata() + pb_resp = analytics_data_api.Metadata.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_metadata(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_metadata_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = analytics_data_api.Metadata.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_metadata", + extra={ + "serviceName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "rpcName": "GetMetadata", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GetPropertyQuotasSnapshot( _BaseAlphaAnalyticsDataRestTransport._BaseGetPropertyQuotasSnapshot, AlphaAnalyticsDataRestStub, @@ -3029,12 +3230,11 @@ def __call__( ) return resp - class _SheetExportAudienceList( - _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList, - AlphaAnalyticsDataRestStub, + class _RunReport( + _BaseAlphaAnalyticsDataRestTransport._BaseRunReport, AlphaAnalyticsDataRestStub ): def __hash__(self): - return hash("AlphaAnalyticsDataRestTransport.SheetExportAudienceList") + return hash("AlphaAnalyticsDataRestTransport.RunReport") @staticmethod def _get_response( @@ -3061,49 +3261,47 @@ def _get_response( def __call__( self, - request: analytics_data_api.SheetExportAudienceListRequest, + request: analytics_data_api.RunReportRequest, *, 
retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> analytics_data_api.SheetExportAudienceListResponse: - r"""Call the sheet export audience - list method over HTTP. + ) -> analytics_data_api.RunReportResponse: + r"""Call the run report method over HTTP. - Args: - request (~.analytics_data_api.SheetExportAudienceListRequest): - The request object. A request to export users in an - audience list to a Google Sheet. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.analytics_data_api.RunReportRequest): + The request object. The request to generate a report. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.analytics_data_api.SheetExportAudienceListResponse: - The created Google Sheet with the - list of users in an audience list. + Returns: + ~.analytics_data_api.RunReportResponse: + The response report table + corresponding to a request. 
""" - http_options = _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_http_options() - - request, metadata = self._interceptor.pre_sheet_export_audience_list( - request, metadata + http_options = ( + _BaseAlphaAnalyticsDataRestTransport._BaseRunReport._get_http_options() ) - transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_transcoded_request( + + request, metadata = self._interceptor.pre_run_report(request, metadata) + transcoded_request = _BaseAlphaAnalyticsDataRestTransport._BaseRunReport._get_transcoded_request( http_options, request ) - body = _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_request_body_json( + body = _BaseAlphaAnalyticsDataRestTransport._BaseRunReport._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = _BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_query_params_json( + query_params = _BaseAlphaAnalyticsDataRestTransport._BaseRunReport._get_query_params_json( transcoded_request ) @@ -3125,26 +3323,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.analytics.data_v1alpha.AlphaAnalyticsDataClient.SheetExportAudienceList", + f"Sending request for google.analytics.data_v1alpha.AlphaAnalyticsDataClient.RunReport", extra={ "serviceName": "google.analytics.data.v1alpha.AlphaAnalyticsData", - "rpcName": "SheetExportAudienceList", + "rpcName": "RunReport", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ( - AlphaAnalyticsDataRestTransport._SheetExportAudienceList._get_response( - self._host, - metadata, - query_params, - self._session, - timeout, - transcoded_request, - body, - ) + response = AlphaAnalyticsDataRestTransport._RunReport._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, ) # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception @@ -3153,24 +3349,22 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = analytics_data_api.SheetExportAudienceListResponse() - pb_resp = analytics_data_api.SheetExportAudienceListResponse.pb(resp) + resp = analytics_data_api.RunReportResponse() + pb_resp = analytics_data_api.RunReportResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_sheet_export_audience_list(resp) + resp = self._interceptor.post_run_report(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_sheet_export_audience_list_with_metadata( + resp, _ = self._interceptor.post_run_report_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = ( - analytics_data_api.SheetExportAudienceListResponse.to_json( - response - ) + response_payload = analytics_data_api.RunReportResponse.to_json( + response ) except: response_payload = None @@ -3180,10 +3374,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.analytics.data_v1alpha.AlphaAnalyticsDataClient.sheet_export_audience_list", + "Received response for google.analytics.data_v1alpha.AlphaAnalyticsDataClient.run_report", extra={ "serviceName": "google.analytics.data.v1alpha.AlphaAnalyticsData", - "rpcName": "SheetExportAudienceList", + "rpcName": "RunReport", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -3233,6 +3427,14 @@ def get_audience_list( # In C++ this would require a dynamic_cast return self._GetAudienceList(self._session, self._host, self._interceptor) # type: ignore + @property + def get_metadata( + self, + ) -> Callable[[analytics_data_api.GetMetadataRequest], analytics_data_api.Metadata]: + # The return type is fine, but mypy isn't sophisticated enough 
to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetMetadata(self._session, self._host, self._interceptor) # type: ignore + @property def get_property_quotas_snapshot( self, @@ -3338,17 +3540,14 @@ def run_funnel_report( return self._RunFunnelReport(self._session, self._host, self._interceptor) # type: ignore @property - def sheet_export_audience_list( + def run_report( self, ) -> Callable[ - [analytics_data_api.SheetExportAudienceListRequest], - analytics_data_api.SheetExportAudienceListResponse, + [analytics_data_api.RunReportRequest], analytics_data_api.RunReportResponse ]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SheetExportAudienceList( - self._session, self._host, self._interceptor - ) # type: ignore + return self._RunReport(self._session, self._host, self._interceptor) # type: ignore @property def kind(self) -> str: diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest_base.py b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest_base.py index 105330f52868..4e1bb2f4ef9a 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest_base.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/services/alpha_analytics_data/transports/rest_base.py @@ -308,6 +308,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetMetadata: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if 
k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/metadata}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_data_api.GetMetadataRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAlphaAnalyticsDataRestTransport._BaseGetMetadata._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetPropertyQuotasSnapshot: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -748,7 +795,7 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params - class _BaseSheetExportAudienceList: + class _BaseRunReport: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -767,7 +814,7 @@ def _get_http_options(): http_options: List[Dict[str, str]] = [ { "method": "post", - "uri": "/v1alpha/{name=properties/*/audienceLists/*}:exportSheet", + "uri": "/v1alpha/{property=properties/*}:runReport", "body": "*", }, ] @@ -775,7 +822,7 @@ def _get_http_options(): @staticmethod def _get_transcoded_request(http_options, request): - pb_request = analytics_data_api.SheetExportAudienceListRequest.pb(request) + pb_request = analytics_data_api.RunReportRequest.pb(request) transcoded_request = path_template.transcode(http_options, pb_request) return transcoded_request @@ -797,7 +844,7 @@ def _get_query_params_json(transcoded_request): ) ) query_params.update( - 
_BaseAlphaAnalyticsDataRestTransport._BaseSheetExportAudienceList._get_unset_required_fields( + _BaseAlphaAnalyticsDataRestTransport._BaseRunReport._get_unset_required_fields( query_params ) ) diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py index 65190daf2010..f63b29812fae 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/__init__.py @@ -23,6 +23,7 @@ CreateRecurringAudienceListRequest, CreateReportTaskRequest, GetAudienceListRequest, + GetMetadataRequest, GetPropertyQuotasSnapshotRequest, GetRecurringAudienceListRequest, GetReportTaskRequest, @@ -32,6 +33,7 @@ ListRecurringAudienceListsResponse, ListReportTasksRequest, ListReportTasksResponse, + Metadata, PropertyQuotasSnapshot, QueryAudienceListRequest, QueryAudienceListResponse, @@ -42,8 +44,8 @@ ReportTaskMetadata, RunFunnelReportRequest, RunFunnelReportResponse, - SheetExportAudienceListRequest, - SheetExportAudienceListResponse, + RunReportRequest, + RunReportResponse, WebhookNotification, ) from .data import ( @@ -52,10 +54,15 @@ CohortReportSettings, CohortSpec, CohortsRange, + Comparison, + ComparisonMetadata, + ConversionMetadata, + ConversionSpec, DateRange, Dimension, DimensionExpression, DimensionHeader, + DimensionMetadata, DimensionValue, EmptyFilter, EventCriteriaScoping, @@ -84,6 +91,7 @@ Metric, MetricAggregation, MetricHeader, + MetricMetadata, MetricType, MetricValue, NumericFilter, @@ -96,6 +104,7 @@ Row, SamplingLevel, SamplingMetadata, + Section, Segment, SegmentEventFilter, SegmentFilter, @@ -133,6 +142,7 @@ "CreateRecurringAudienceListRequest", "CreateReportTaskRequest", "GetAudienceListRequest", + "GetMetadataRequest", "GetPropertyQuotasSnapshotRequest", "GetRecurringAudienceListRequest", "GetReportTaskRequest", @@ -142,6 +152,7 @@ 
"ListRecurringAudienceListsResponse", "ListReportTasksRequest", "ListReportTasksResponse", + "Metadata", "PropertyQuotasSnapshot", "QueryAudienceListRequest", "QueryAudienceListResponse", @@ -152,18 +163,23 @@ "ReportTaskMetadata", "RunFunnelReportRequest", "RunFunnelReportResponse", - "SheetExportAudienceListRequest", - "SheetExportAudienceListResponse", + "RunReportRequest", + "RunReportResponse", "WebhookNotification", "BetweenFilter", "Cohort", "CohortReportSettings", "CohortSpec", "CohortsRange", + "Comparison", + "ComparisonMetadata", + "ConversionMetadata", + "ConversionSpec", "DateRange", "Dimension", "DimensionExpression", "DimensionHeader", + "DimensionMetadata", "DimensionValue", "EmptyFilter", "EventSegment", @@ -189,6 +205,7 @@ "InListFilter", "Metric", "MetricHeader", + "MetricMetadata", "MetricValue", "NumericFilter", "NumericValue", @@ -225,6 +242,7 @@ "MetricType", "RestrictedMetricType", "SamplingLevel", + "Section", "SessionCriteriaScoping", "SessionExclusionDuration", "UserCriteriaScoping", diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py index 8835dc589a2c..2bdf94597bb2 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/analytics_data_api.py @@ -41,8 +41,6 @@ "AudienceListMetadata", "QueryAudienceListRequest", "QueryAudienceListResponse", - "SheetExportAudienceListRequest", - "SheetExportAudienceListResponse", "AudienceRow", "AudienceDimension", "AudienceDimensionValue", @@ -56,6 +54,10 @@ "GetReportTaskRequest", "ListReportTasksRequest", "ListReportTasksResponse", + "RunReportRequest", + "RunReportResponse", + "GetMetadataRequest", + "Metadata", }, ) @@ -822,123 +824,6 @@ class QueryAudienceListResponse(proto.Message): ) -class SheetExportAudienceListRequest(proto.Message): - 
r"""A request to export users in an audience list to a Google - Sheet. - - Attributes: - name (str): - Required. The name of the audience list to retrieve users - from. Format: - ``properties/{property}/audienceLists/{audience_list}`` - offset (int): - Optional. The row count of the start row. The first row is - counted as row 0. - - When paging, the first request does not specify offset; or - equivalently, sets offset to 0; the first request returns - the first ``limit`` of rows. The second request sets offset - to the ``limit`` of the first request; the second request - returns the second ``limit`` of rows. - - To learn more about this pagination parameter, see - `Pagination `__. - limit (int): - Optional. The number of rows to return. If unspecified, - 10,000 rows are returned. The API returns a maximum of - 250,000 rows per request, no matter how many you ask for. - ``limit`` must be positive. - - The API can also return fewer rows than the requested - ``limit``, if there aren't as many dimension values as the - ``limit``. - - To learn more about this pagination parameter, see - `Pagination `__. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - offset: int = proto.Field( - proto.INT64, - number=2, - ) - limit: int = proto.Field( - proto.INT64, - number=3, - ) - - -class SheetExportAudienceListResponse(proto.Message): - r"""The created Google Sheet with the list of users in an - audience list. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - spreadsheet_uri (str): - A uri for you to visit in your browser to - view the Google Sheet. - - This field is a member of `oneof`_ ``_spreadsheet_uri``. - spreadsheet_id (str): - An ID that identifies the created Google - Sheet resource. - - This field is a member of `oneof`_ ``_spreadsheet_id``. - row_count (int): - The total number of rows in the AudienceList result. 
- ``rowCount`` is independent of the number of rows returned - in the response, the ``limit`` request parameter, and the - ``offset`` request parameter. For example if a query returns - 175 rows and includes ``limit`` of 50 in the API request, - the response will contain ``rowCount`` of 175 but only 50 - rows. - - To learn more about this pagination parameter, see - `Pagination `__. - - This field is a member of `oneof`_ ``_row_count``. - audience_list (google.analytics.data_v1alpha.types.AudienceList): - Configuration data about AudienceList being exported. - Returned to help interpret the AudienceList in the Google - Sheet of this response. - - For example, the AudienceList may have more rows than are - present in the Google Sheet, and in that case, you may want - to send an additional sheet export request with a different - ``offset`` value to retrieve the next page of rows in an - additional Google Sheet. - - This field is a member of `oneof`_ ``_audience_list``. - """ - - spreadsheet_uri: str = proto.Field( - proto.STRING, - number=1, - optional=True, - ) - spreadsheet_id: str = proto.Field( - proto.STRING, - number=2, - optional=True, - ) - row_count: int = proto.Field( - proto.INT32, - number=3, - optional=True, - ) - audience_list: "AudienceList" = proto.Field( - proto.MESSAGE, - number=4, - optional=True, - message="AudienceList", - ) - - class AudienceRow(proto.Message): r"""Dimension value attributes for the audience user row. @@ -1730,4 +1615,370 @@ def raw_page(self): ) +class RunReportRequest(proto.Message): + r"""The request to generate a report. + + Attributes: + property (str): + Required. A Google Analytics property identifier whose + events are tracked. Specified in the URL path and not the + body. To learn more, see `where to find your Property + ID `__. + Within a batch request, this property should either be + unspecified or consistent with the batch-level property. 
+ + Example: properties/1234 + dimensions (MutableSequence[google.analytics.data_v1alpha.types.Dimension]): + Optional. The dimensions requested and + displayed. + metrics (MutableSequence[google.analytics.data_v1alpha.types.Metric]): + Optional. The metrics requested and + displayed. + date_ranges (MutableSequence[google.analytics.data_v1alpha.types.DateRange]): + Optional. Date ranges of data to read. If multiple date + ranges are requested, each response row will contain a zero + based date range index. If two date ranges overlap, the + event data for the overlapping days is included in the + response rows for both date ranges. In a cohort request, + this ``dateRanges`` must be unspecified. + dimension_filter (google.analytics.data_v1alpha.types.FilterExpression): + Optional. Dimension filters let you ask for only specific + dimension values in the report. To learn more, see + `Fundamentals of Dimension + Filters `__ + for examples. Metrics cannot be used in this filter. + metric_filter (google.analytics.data_v1alpha.types.FilterExpression): + Optional. The filter clause of metrics. + Applied after aggregating the report's rows, + similar to SQL having-clause. Dimensions cannot + be used in this filter. + offset (int): + Optional. The row count of the start row. The first row is + counted as row 0. + + When paging, the first request does not specify offset; or + equivalently, sets offset to 0; the first request returns + the first ``limit`` of rows. The second request sets offset + to the ``limit`` of the first request; the second request + returns the second ``limit`` of rows. + + To learn more about this pagination parameter, see + `Pagination `__. + limit (int): + Optional. The maximum number of rows to return. If + unspecified, 10,000 rows are returned. The API returns a + maximum of 250,000 rows per request, no matter how many you + ask for. ``limit`` must be positive. 
+ + The API can also return fewer rows than the requested + ``limit``, if there aren't as many dimension values as the + ``limit``. For instance, there are fewer than 300 possible + values for the dimension ``country``, so when reporting on + only ``country``, you can't get more than 300 rows, even if + you set ``limit`` to a higher value. + + To learn more about this pagination parameter, see + `Pagination `__. + metric_aggregations (MutableSequence[google.analytics.data_v1alpha.types.MetricAggregation]): + Optional. Aggregation of metrics. Aggregated metric values + will be shown in rows where the dimension_values are set to + "RESERVED\_(MetricAggregation)". Aggregates including both + comparisons and multiple date ranges will be aggregated + based on the date ranges. + order_bys (MutableSequence[google.analytics.data_v1alpha.types.OrderBy]): + Optional. Specifies how rows are ordered in + the response. Requests including both + comparisons and multiple date ranges will have + order bys applied on the comparisons. + currency_code (str): + Optional. A currency code in ISO4217 format, + such as "AED", "USD", "JPY". If the field is + empty, the report uses the property's default + currency. + cohort_spec (google.analytics.data_v1alpha.types.CohortSpec): + Optional. Cohort group associated with this + request. If there is a cohort group in the + request the 'cohort' dimension must be present. + keep_empty_rows (bool): + Optional. If false or unspecified, each row with all metrics + equal to 0 will not be returned. If true, these rows will be + returned if they are not separately removed by a filter. + + Regardless of this ``keep_empty_rows`` setting, only data + recorded by the Google Analytics property can be displayed + in a report. + + For example if a property never logs a ``purchase`` event, + then a query for the ``eventName`` dimension and + ``eventCount`` metric will not have a row eventName: + "purchase" and eventCount: 0. 
+ return_property_quota (bool): + Optional. Toggles whether to return the current state of + this Google Analytics property's quota. Quota is returned in + `PropertyQuota <#PropertyQuota>`__. + comparisons (MutableSequence[google.analytics.data_v1alpha.types.Comparison]): + Optional. The configuration of comparisons + requested and displayed. The request only + requires a comparisons field in order to receive + a comparison column in the response. + conversion_spec (google.analytics.data_v1alpha.types.ConversionSpec): + Optional. Controls conversion reporting. This + field is optional. If this field is set or any + conversion metrics are requested, the report + will be a conversion report. + """ + + property: str = proto.Field( + proto.STRING, + number=1, + ) + dimensions: MutableSequence[data.Dimension] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.Dimension, + ) + metrics: MutableSequence[data.Metric] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Metric, + ) + date_ranges: MutableSequence[data.DateRange] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.DateRange, + ) + dimension_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=5, + message=data.FilterExpression, + ) + metric_filter: data.FilterExpression = proto.Field( + proto.MESSAGE, + number=6, + message=data.FilterExpression, + ) + offset: int = proto.Field( + proto.INT64, + number=7, + ) + limit: int = proto.Field( + proto.INT64, + number=8, + ) + metric_aggregations: MutableSequence[data.MetricAggregation] = proto.RepeatedField( + proto.ENUM, + number=9, + enum=data.MetricAggregation, + ) + order_bys: MutableSequence[data.OrderBy] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message=data.OrderBy, + ) + currency_code: str = proto.Field( + proto.STRING, + number=11, + ) + cohort_spec: data.CohortSpec = proto.Field( + proto.MESSAGE, + number=12, + message=data.CohortSpec, + ) + keep_empty_rows: bool = proto.Field( + 
proto.BOOL, + number=13, + ) + return_property_quota: bool = proto.Field( + proto.BOOL, + number=14, + ) + comparisons: MutableSequence[data.Comparison] = proto.RepeatedField( + proto.MESSAGE, + number=15, + message=data.Comparison, + ) + conversion_spec: data.ConversionSpec = proto.Field( + proto.MESSAGE, + number=16, + message=data.ConversionSpec, + ) + + +class RunReportResponse(proto.Message): + r"""The response report table corresponding to a request. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + dimension_headers (MutableSequence[google.analytics.data_v1alpha.types.DimensionHeader]): + Describes dimension columns. The number of + DimensionHeaders and ordering of + DimensionHeaders matches the dimensions present + in rows. + metric_headers (MutableSequence[google.analytics.data_v1alpha.types.MetricHeader]): + Describes metric columns. The number of + MetricHeaders and ordering of MetricHeaders + matches the metrics present in rows. + rows (MutableSequence[google.analytics.data_v1alpha.types.Row]): + Rows of dimension value combinations and + metric values in the report. + totals (MutableSequence[google.analytics.data_v1alpha.types.Row]): + If requested, the totaled values of metrics. + maximums (MutableSequence[google.analytics.data_v1alpha.types.Row]): + If requested, the maximum values of metrics. + minimums (MutableSequence[google.analytics.data_v1alpha.types.Row]): + If requested, the minimum values of metrics. + row_count (int): + The total number of rows in the query result, regardless of + the number of rows returned in the response. For example if + a query returns 175 rows and includes limit = 50 in the API + request, the response will contain row_count = 175 but only + 50 rows. + + To learn more about this pagination parameter, see + `Pagination `__. + metadata (google.analytics.data_v1alpha.types.ResponseMetaData): + Metadata for the report. 
+ property_quota (google.analytics.data_v1alpha.types.PropertyQuota): + This Analytics Property's quota state + including this request. + kind (str): + Identifies what kind of resource this message is. This + ``kind`` is always the fixed string + "analyticsData#runReport". Useful to distinguish between + response types in JSON. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + + This field is a member of `oneof`_ ``_next_page_token``. + """ + + @property + def raw_page(self): + return self + + dimension_headers: MutableSequence[data.DimensionHeader] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data.DimensionHeader, + ) + metric_headers: MutableSequence[data.MetricHeader] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricHeader, + ) + rows: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=data.Row, + ) + totals: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.Row, + ) + maximums: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.Row, + ) + minimums: MutableSequence[data.Row] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=data.Row, + ) + row_count: int = proto.Field( + proto.INT32, + number=7, + ) + metadata: data.ResponseMetaData = proto.Field( + proto.MESSAGE, + number=8, + message=data.ResponseMetaData, + ) + property_quota: data.PropertyQuota = proto.Field( + proto.MESSAGE, + number=9, + message=data.PropertyQuota, + ) + kind: str = proto.Field( + proto.STRING, + number=10, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=11, + optional=True, + ) + + +class GetMetadataRequest(proto.Message): + r"""Request for a property's dimension and metric metadata. + + Attributes: + name (str): + Required. The resource name of the metadata to retrieve. 
+ This name field is specified in the URL path and not URL + parameters. Property is a numeric Google Analytics property + identifier. To learn more, see `where to find your Property + ID `__. + + Example: properties/1234/metadata + + Set the Property ID to 0 for dimensions and metrics common + to all properties. In this special mode, this method will + not return custom dimensions and metrics. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class Metadata(proto.Message): + r"""The dimensions, metrics and comparisons currently accepted in + reporting methods. + + Attributes: + name (str): + Resource name of this metadata. + dimensions (MutableSequence[google.analytics.data_v1alpha.types.DimensionMetadata]): + The dimension descriptions. + metrics (MutableSequence[google.analytics.data_v1alpha.types.MetricMetadata]): + The metric descriptions. + comparisons (MutableSequence[google.analytics.data_v1alpha.types.ComparisonMetadata]): + The comparison descriptions. + conversions (MutableSequence[google.analytics.data_v1alpha.types.ConversionMetadata]): + The conversion descriptions. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=3, + ) + dimensions: MutableSequence[data.DimensionMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=data.DimensionMetadata, + ) + metrics: MutableSequence[data.MetricMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=data.MetricMetadata, + ) + comparisons: MutableSequence[data.ComparisonMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message=data.ComparisonMetadata, + ) + conversions: MutableSequence[data.ConversionMetadata] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=data.ConversionMetadata, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py index d7d184615eae..7b003d58f2b0 100644 --- a/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py +++ b/packages/google-analytics-data/google/analytics/data_v1alpha/types/data.py @@ -23,6 +23,7 @@ __protobuf__ = proto.module( package="google.analytics.data.v1alpha", manifest={ + "Section", "UserCriteriaScoping", "UserExclusionDuration", "SessionCriteriaScoping", @@ -37,6 +38,7 @@ "Dimension", "DimensionExpression", "Metric", + "Comparison", "FilterExpression", "FilterExpressionList", "Filter", @@ -97,10 +99,43 @@ "FunnelParameterFilter", "FunnelResponseMetadata", "SamplingMetadata", + "ConversionSpec", + "DimensionMetadata", + "MetricMetadata", + "ComparisonMetadata", + "ConversionMetadata", }, ) +class Section(proto.Enum): + r"""Identifies if the report data is from the standard report + data or conversion data + + Values: + SECTION_UNSPECIFIED (0): + Should never be specified. + SECTION_REPORT (1): + The report data is from the standard report + data. Google Analytics reports include + acquisition, engagement, and user behavior + reports. 
Reports use dimensions like session + source & landing page; reports use metrics like + sessions, views, and engagement time. + SECTION_ADVERTISING (2): + The report data is from the conversion data. + The Google Analytics Advertising section reports + on conversion performance. Advertising reports + use dimensions like source & medium; advertising + reports use metrics like all conversions and ads + cost. + """ + + SECTION_UNSPECIFIED = 0 + SECTION_REPORT = 1 + SECTION_ADVERTISING = 2 + + class UserCriteriaScoping(proto.Enum): r"""Scoping specifies which events are considered when evaluating if a user meets a criteria. @@ -581,6 +616,56 @@ class Metric(proto.Message): ) +class Comparison(proto.Message): + r"""Defines an individual comparison. Most requests will include + multiple comparisons so that the report compares between the + comparisons. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + Each comparison produces separate rows in the + response. In the response, this comparison is + identified by this name. If name is unspecified, + we will use the saved comparisons display name. + + This field is a member of `oneof`_ ``_name``. + dimension_filter (google.analytics.data_v1alpha.types.FilterExpression): + A basic comparison. + + This field is a member of `oneof`_ ``one_comparison``. + comparison (str): + A saved comparison identified by the + comparison's resource name. For example, + 'comparisons/1234'. + + This field is a member of `oneof`_ ``one_comparison``. 
+ """ + + name: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + dimension_filter: "FilterExpression" = proto.Field( + proto.MESSAGE, + number=2, + oneof="one_comparison", + message="FilterExpression", + ) + comparison: str = proto.Field( + proto.STRING, + number=3, + oneof="one_comparison", + ) + + class FilterExpression(proto.Message): r"""To express dimension or metric filters. The fields in the same FilterExpression need to be either all dimensions or all @@ -1303,15 +1388,17 @@ class ResponseMetaData(proto.Message): This field is a member of `oneof`_ ``_subject_to_thresholding``. sampling_metadatas (MutableSequence[google.analytics.data_v1alpha.types.SamplingMetadata]): - If this report's results are + If this report results is `sampled `__, this describes the percentage of events used in this report. One ``samplingMetadatas`` is populated for each date range. Each ``samplingMetadatas`` corresponds to a date range in - the order that date ranges were specified in the request. + order that date ranges were specified in the request. However if the results are not sampled, this field will not be defined. + section (google.analytics.data_v1alpha.types.Section): + Identifies the type of data in the report. """ class SchemaRestrictionResponse(proto.Message): @@ -1398,6 +1485,11 @@ class ActiveMetricRestriction(proto.Message): number=9, message="SamplingMetadata", ) + section: "Section" = proto.Field( + proto.ENUM, + number=10, + enum="Section", + ) class DimensionHeader(proto.Message): @@ -3190,8 +3282,8 @@ class FunnelResponseMetadata(proto.Message): this describes what percentage of events were used in this funnel report. One ``samplingMetadatas`` is populated for each date range. Each ``samplingMetadatas`` corresponds to a - date range in the order that date ranges were specified in - the request. + date range in order that date ranges were specified in the + request. However if the results are not sampled, this field will not be defined. 
@@ -3240,4 +3332,303 @@ class SamplingMetadata(proto.Message): ) +class ConversionSpec(proto.Message): + r"""Controls conversion reporting. + + + + Attributes: + conversion_actions (MutableSequence[str]): + The conversion action IDs to include in the report. If + empty, all conversions are included. Valid conversion action + IDs can be retrieved from the ``conversion_action`` field + within the ``conversions`` list in the response of the + ``GetMetadata`` method. For example, + 'conversionActions/1234'. + attribution_model (google.analytics.data_v1alpha.types.ConversionSpec.AttributionModel): + The attribution model to use in the Conversion Report. If + unspecified, ``DATA_DRIVEN`` is used. + """ + + class AttributionModel(proto.Enum): + r"""Attribution model to use in the Conversion Report + + Values: + ATTRIBUTION_MODEL_UNSPECIFIED (0): + Unspecified attribution model. + DATA_DRIVEN (1): + Attribution was based on the paid and organic + data driven model + LAST_CLICK (2): + Attribution was based on the paid and organic + last click model + """ + + ATTRIBUTION_MODEL_UNSPECIFIED = 0 + DATA_DRIVEN = 1 + LAST_CLICK = 2 + + conversion_actions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + attribution_model: AttributionModel = proto.Field( + proto.ENUM, + number=2, + enum=AttributionModel, + ) + + +class DimensionMetadata(proto.Message): + r"""Explains a dimension. + + Attributes: + api_name (str): + This dimension's name. Usable in + `Dimension <#Dimension>`__'s ``name``. For example, + ``eventName``. + ui_name (str): + This dimension's name within the Google Analytics user + interface. For example, ``Event name``. + description (str): + Description of how this dimension is used and + calculated. + deprecated_api_names (MutableSequence[str]): + Still usable but deprecated names for this dimension. If + populated, this dimension is available by either ``apiName`` + or one of ``deprecatedApiNames`` for a period of time. 
After + the deprecation period, the dimension will be available only + by ``apiName``. + custom_definition (bool): + True if the dimension is custom to this + property. This includes user, event, & item + scoped custom dimensions; to learn more about + custom dimensions, see + https://support.google.com/analytics/answer/14240153. + This also include custom channel groups; to + learn more about custom channel groups, see + https://support.google.com/analytics/answer/13051316. + category (str): + The display name of the category that this + dimension belongs to. Similar dimensions and + metrics are categorized together. + sections (MutableSequence[google.analytics.data_v1alpha.types.Section]): + Specifies the Google Analytics sections this + dimension applies to. + """ + + api_name: str = proto.Field( + proto.STRING, + number=1, + ) + ui_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + deprecated_api_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + custom_definition: bool = proto.Field( + proto.BOOL, + number=5, + ) + category: str = proto.Field( + proto.STRING, + number=6, + ) + sections: MutableSequence["Section"] = proto.RepeatedField( + proto.ENUM, + number=7, + enum="Section", + ) + + +class MetricMetadata(proto.Message): + r"""Explains a metric. + + Attributes: + api_name (str): + A metric name. Usable in `Metric <#Metric>`__'s ``name``. + For example, ``eventCount``. + ui_name (str): + This metric's name within the Google Analytics user + interface. For example, ``Event count``. + description (str): + Description of how this metric is used and + calculated. + deprecated_api_names (MutableSequence[str]): + Still usable but deprecated names for this metric. If + populated, this metric is available by either ``apiName`` or + one of ``deprecatedApiNames`` for a period of time. 
After + the deprecation period, the metric will be available only by + ``apiName``. + type_ (google.analytics.data_v1alpha.types.MetricType): + The type of this metric. + expression (str): + The mathematical expression for this derived metric. Can be + used in `Metric <#Metric>`__'s ``expression`` field for + equivalent reports. Most metrics are not expressions, and + for non-expressions, this field is empty. + custom_definition (bool): + True if the metric is a custom metric for + this property. + blocked_reasons (MutableSequence[google.analytics.data_v1alpha.types.MetricMetadata.BlockedReason]): + If reasons are specified, your access is blocked to this + metric for this property. API requests from you to this + property for this metric will succeed; however, the report + will contain only zeros for this metric. API requests with + metric filters on blocked metrics will fail. If reasons are + empty, you have access to this metric. + + To learn more, see `Access and data-restriction + management `__. + category (str): + The display name of the category that this + metrics belongs to. Similar dimensions and + metrics are categorized together. + sections (MutableSequence[google.analytics.data_v1alpha.types.Section]): + Specifies the Google Analytics sections this + metric applies to. + """ + + class BlockedReason(proto.Enum): + r"""Justifications for why this metric is blocked. + + Values: + BLOCKED_REASON_UNSPECIFIED (0): + Will never be specified in API response. + NO_REVENUE_METRICS (1): + If present, your access is blocked to revenue + related metrics for this property, and this + metric is revenue related. + NO_COST_METRICS (2): + If present, your access is blocked to cost + related metrics for this property, and this + metric is cost related. 
+ """ + + BLOCKED_REASON_UNSPECIFIED = 0 + NO_REVENUE_METRICS = 1 + NO_COST_METRICS = 2 + + api_name: str = proto.Field( + proto.STRING, + number=1, + ) + ui_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + deprecated_api_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + type_: "MetricType" = proto.Field( + proto.ENUM, + number=5, + enum="MetricType", + ) + expression: str = proto.Field( + proto.STRING, + number=6, + ) + custom_definition: bool = proto.Field( + proto.BOOL, + number=7, + ) + blocked_reasons: MutableSequence[BlockedReason] = proto.RepeatedField( + proto.ENUM, + number=8, + enum=BlockedReason, + ) + category: str = proto.Field( + proto.STRING, + number=9, + ) + sections: MutableSequence["Section"] = proto.RepeatedField( + proto.ENUM, + number=10, + enum="Section", + ) + + +class ComparisonMetadata(proto.Message): + r"""The metadata for a single comparison. + + Attributes: + api_name (str): + This comparison's resource name. Usable in + `Comparison <#Comparison>`__'s ``comparison`` field. For + example, 'comparisons/1234'. + ui_name (str): + This comparison's name within the Google + Analytics user interface. + description (str): + This comparison's description. + """ + + api_name: str = proto.Field( + proto.STRING, + number=1, + ) + ui_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ConversionMetadata(proto.Message): + r"""The metadata for a single conversion. + + + + Attributes: + conversion_action (str): + The unique identifier of the conversion action. This ID is + used to specify which conversions to include in a report by + populating the ``conversion_actions`` field in the + ``ConversionsSpec`` of a report request. For example, + 'conversionActions/1234'. + display_name (str): + This conversion's name within the Google + Analytics user interface. 
+ """ + + conversion_action: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-data/noxfile.py b/packages/google-analytics-data/noxfile.py index 202141840a7d..91e2913611ac 100644 --- a/packages/google-analytics-data/noxfile.py +++ b/packages/google-analytics-data/noxfile.py @@ -390,7 +390,6 @@ def docs(session): shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors "-b", diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_async.py similarity index 79% rename from packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py rename to packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_async.py index ea9a4c23590f..9d0b7c670c79 100644 --- a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_async.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for SheetExportAudienceList +# Snippet for GetMetadata # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. 
@@ -23,7 +23,7 @@ # python3 -m pip install google-analytics-data -# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_async] +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetMetadata_async] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,20 +34,20 @@ from google.analytics import data_v1alpha -async def sample_sheet_export_audience_list(): +async def sample_get_metadata(): # Create a client client = data_v1alpha.AlphaAnalyticsDataAsyncClient() # Initialize request argument(s) - request = data_v1alpha.SheetExportAudienceListRequest( + request = data_v1alpha.GetMetadataRequest( name="name_value", ) # Make the request - response = await client.sheet_export_audience_list(request=request) + response = await client.get_metadata(request=request) # Handle the response print(response) -# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_async] +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetMetadata_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_sync.py similarity index 80% rename from packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py rename to packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_sync.py index 89ed25c465d8..80441e9666a2 100644 --- a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py +++ 
b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_sync.py @@ -15,7 +15,7 @@ # # Generated code. DO NOT EDIT! # -# Snippet for SheetExportAudienceList +# Snippet for GetMetadata # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. @@ -23,7 +23,7 @@ # python3 -m pip install google-analytics-data -# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_sync] +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetMetadata_sync] # This snippet has been automatically generated and should be regarded as a # code template only. # It will require modifications to work: @@ -34,20 +34,20 @@ from google.analytics import data_v1alpha -def sample_sheet_export_audience_list(): +def sample_get_metadata(): # Create a client client = data_v1alpha.AlphaAnalyticsDataClient() # Initialize request argument(s) - request = data_v1alpha.SheetExportAudienceListRequest( + request = data_v1alpha.GetMetadataRequest( name="name_value", ) # Make the request - response = client.sheet_export_audience_list(request=request) + response = client.get_metadata(request=request) # Handle the response print(response) -# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_sync] +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetMetadata_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_async.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_async.py new file mode 100644 index 000000000000..b08c5e2d0940 --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 
2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunReport_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +async def sample_run_report(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataAsyncClient() + + # Initialize request argument(s) + request = data_v1alpha.RunReportRequest( + property="property_value", + ) + + # Make the request + response = await client.run_report(request=request) + + # Handle the response + print(response) + + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunReport_async] diff --git a/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_sync.py b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_sync.py new file mode 100644 index 000000000000..27dfeaaaabf1 --- /dev/null +++ b/packages/google-analytics-data/samples/generated_samples/analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for RunReport +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-analytics-data + + +# [START analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunReport_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.analytics import data_v1alpha + + +def sample_run_report(): + # Create a client + client = data_v1alpha.AlphaAnalyticsDataClient() + + # Initialize request argument(s) + request = data_v1alpha.RunReportRequest( + property="property_value", + ) + + # Make the request + response = client.run_report(request=request) + + # Handle the response + print(response) + + +# [END analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunReport_sync] diff --git a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json index be713ddd7edf..6a32318310e9 100644 --- a/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json +++ b/packages/google-analytics-data/samples/generated_samples/snippet_metadata_google.analytics.data.v1alpha.json @@ -679,6 +679,167 @@ ], "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_audience_list_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", + "shortName": "AlphaAnalyticsDataAsyncClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.get_metadata", + "method": { + "fullName": 
"google.analytics.data.v1alpha.AlphaAnalyticsData.GetMetadata", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetMetadata" + }, + "parameters": [ + { + "name": "request", + "type": "google.analytics.data_v1alpha.types.GetMetadataRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.Metadata", + "shortName": "get_metadata" + }, + "description": "Sample for GetMetadata", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetMetadata_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", + "shortName": "AlphaAnalyticsDataClient" + }, + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.get_metadata", + "method": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.GetMetadata", + "service": { + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", + "shortName": "AlphaAnalyticsData" + }, + "shortName": "GetMetadata" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.analytics.data_v1alpha.types.GetMetadataRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.analytics.data_v1alpha.types.Metadata", + "shortName": "get_metadata" + }, + "description": "Sample for GetMetadata", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_GetMetadata_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_get_metadata_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2128,23 +2289,19 @@ "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient", "shortName": "AlphaAnalyticsDataAsyncClient" }, - "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.sheet_export_audience_list", + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataAsyncClient.run_report", "method": { - "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.SheetExportAudienceList", + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.RunReport", "service": { "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", "shortName": "AlphaAnalyticsData" }, - "shortName": "SheetExportAudienceList" + "shortName": "RunReport" }, "parameters": [ { "name": "request", - "type": 
"google.analytics.data_v1alpha.types.SheetExportAudienceListRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.analytics.data_v1alpha.types.RunReportRequest" }, { "name": "retry", @@ -2159,14 +2316,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.analytics.data_v1alpha.types.SheetExportAudienceListResponse", - "shortName": "sheet_export_audience_list" + "resultType": "google.analytics.data_v1alpha.types.RunReportResponse", + "shortName": "run_report" }, - "description": "Sample for SheetExportAudienceList", - "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py", + "description": "Sample for RunReport", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_async", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunReport_async", "segments": [ { "end": 51, @@ -2199,7 +2356,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_async.py" + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_async.py" }, { "canonical": true, @@ -2208,23 +2365,19 @@ "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient", "shortName": "AlphaAnalyticsDataClient" }, - "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.sheet_export_audience_list", + "fullName": "google.analytics.data_v1alpha.AlphaAnalyticsDataClient.run_report", "method": { - "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.SheetExportAudienceList", + "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData.RunReport", "service": { "fullName": "google.analytics.data.v1alpha.AlphaAnalyticsData", "shortName": "AlphaAnalyticsData" }, - "shortName": "SheetExportAudienceList" + "shortName": 
"RunReport" }, "parameters": [ { "name": "request", - "type": "google.analytics.data_v1alpha.types.SheetExportAudienceListRequest" - }, - { - "name": "name", - "type": "str" + "type": "google.analytics.data_v1alpha.types.RunReportRequest" }, { "name": "retry", @@ -2239,14 +2392,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.analytics.data_v1alpha.types.SheetExportAudienceListResponse", - "shortName": "sheet_export_audience_list" + "resultType": "google.analytics.data_v1alpha.types.RunReportResponse", + "shortName": "run_report" }, - "description": "Sample for SheetExportAudienceList", - "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py", + "description": "Sample for RunReport", + "file": "analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_SheetExportAudienceList_sync", + "regionTag": "analyticsdata_v1alpha_generated_AlphaAnalyticsData_RunReport_sync", "segments": [ { "end": 51, @@ -2279,7 +2432,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_sheet_export_audience_list_sync.py" + "title": "analyticsdata_v1alpha_generated_alpha_analytics_data_run_report_sync.py" } ] } diff --git a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py index c55650dc959d..4cb9bd396d55 100644 --- a/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py +++ b/packages/google-analytics-data/tests/unit/gapic/data_v1alpha/test_alpha_analytics_data.py @@ -1354,9 +1354,6 @@ def test_alpha_analytics_data_client_create_channel_credentials_file( default_scopes=( "https://www.googleapis.com/auth/analytics", "https://www.googleapis.com/auth/analytics.readonly", - 
"https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/drive.file", - "https://www.googleapis.com/auth/spreadsheets", ), scopes=None, default_host="analyticsdata.googleapis.com", @@ -2323,11 +2320,11 @@ async def test_query_audience_list_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.SheetExportAudienceListRequest, + analytics_data_api.GetAudienceListRequest, dict, ], ) -def test_sheet_export_audience_list(request_type, transport: str = "grpc"): +def test_get_audience_list(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2339,30 +2336,42 @@ def test_sheet_export_audience_list(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" + type(client.transport.get_audience_list), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.SheetExportAudienceListResponse( - spreadsheet_uri="spreadsheet_uri_value", - spreadsheet_id="spreadsheet_id_value", + call.return_value = analytics_data_api.AudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + recurring_audience_list="recurring_audience_list_value", ) - response = client.sheet_export_audience_list(request) + response = client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) - assert response.spreadsheet_uri == "spreadsheet_uri_value" - assert response.spreadsheet_id == "spreadsheet_id_value" + assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == "recurring_audience_list_value" -def test_sheet_export_audience_list_non_empty_request_with_auto_populated_field(): +def test_get_audience_list_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -2373,26 +2382,26 @@ def test_sheet_export_audience_list_non_empty_request_with_auto_populated_field( # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.SheetExportAudienceListRequest( + request = analytics_data_api.GetAudienceListRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" + type(client.transport.get_audience_list), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.sheet_export_audience_list(request=request) + client.get_audience_list(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.SheetExportAudienceListRequest( + assert args[0] == analytics_data_api.GetAudienceListRequest( name="name_value", ) -def test_sheet_export_audience_list_use_cached_wrapped_rpc(): +def test_get_audience_list_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2406,26 +2415,23 @@ def test_sheet_export_audience_list_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.sheet_export_audience_list - in client._transport._wrapped_methods - ) + assert client._transport.get_audience_list in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.sheet_export_audience_list - ] = mock_rpc + client._transport._wrapped_methods[client._transport.get_audience_list] = ( + mock_rpc + ) request = {} - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.sheet_export_audience_list(request) + client.get_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2433,7 +2439,7 @@ def test_sheet_export_audience_list_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_sheet_export_audience_list_async_use_cached_wrapped_rpc( +async def test_get_audience_list_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2450,7 +2456,7 @@ async def test_sheet_export_audience_list_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.sheet_export_audience_list + client._client._transport.get_audience_list in client._client._transport._wrapped_methods ) @@ -2458,16 +2464,16 @@ async def test_sheet_export_audience_list_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.sheet_export_audience_list + client._client._transport.get_audience_list ] = mock_rpc request = {} - await client.sheet_export_audience_list(request) + await client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.sheet_export_audience_list(request) + await client.get_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2475,9 +2481,9 @@ async def test_sheet_export_audience_list_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_sheet_export_audience_list_async( +async def test_get_audience_list_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.SheetExportAudienceListRequest, + request_type=analytics_data_api.GetAudienceListRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -2490,53 +2496,65 @@ async def test_sheet_export_audience_list_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" + type(client.transport.get_audience_list), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.SheetExportAudienceListResponse( - spreadsheet_uri="spreadsheet_uri_value", - spreadsheet_id="spreadsheet_id_value", + analytics_data_api.AudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + recurring_audience_list="recurring_audience_list_value", ) ) - response = await client.sheet_export_audience_list(request) + response = await client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) - assert response.spreadsheet_uri == "spreadsheet_uri_value" - assert response.spreadsheet_id == "spreadsheet_id_value" + assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == "recurring_audience_list_value" @pytest.mark.asyncio -async def test_sheet_export_audience_list_async_from_dict(): - await test_sheet_export_audience_list_async(request_type=dict) +async def test_get_audience_list_async_from_dict(): + await test_get_audience_list_async(request_type=dict) -def test_sheet_export_audience_list_field_headers(): +def test_get_audience_list_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" + type(client.transport.get_audience_list), "__call__" ) as call: - call.return_value = analytics_data_api.SheetExportAudienceListResponse() - client.sheet_export_audience_list(request) + call.return_value = analytics_data_api.AudienceList() + client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -2552,25 +2570,25 @@ def test_sheet_export_audience_list_field_headers(): @pytest.mark.asyncio -async def test_sheet_export_audience_list_field_headers_async(): +async def test_get_audience_list_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" + type(client.transport.get_audience_list), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.SheetExportAudienceListResponse() + analytics_data_api.AudienceList() ) - await client.sheet_export_audience_list(request) + await client.get_audience_list(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2585,20 +2603,20 @@ async def test_sheet_export_audience_list_field_headers_async(): ) in kw["metadata"] -def test_sheet_export_audience_list_flattened(): +def test_get_audience_list_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" + type(client.transport.get_audience_list), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.SheetExportAudienceListResponse() + call.return_value = analytics_data_api.AudienceList() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.sheet_export_audience_list( + client.get_audience_list( name="name_value", ) @@ -2611,7 +2629,7 @@ def test_sheet_export_audience_list_flattened(): assert arg == mock_val -def test_sheet_export_audience_list_flattened_error(): +def test_get_audience_list_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2619,31 +2637,31 @@ def test_sheet_export_audience_list_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.sheet_export_audience_list( - analytics_data_api.SheetExportAudienceListRequest(), + client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_sheet_export_audience_list_flattened_async(): +async def test_get_audience_list_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" + type(client.transport.get_audience_list), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.SheetExportAudienceListResponse() + call.return_value = analytics_data_api.AudienceList() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.SheetExportAudienceListResponse() + analytics_data_api.AudienceList() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.sheet_export_audience_list( + response = await client.get_audience_list( name="name_value", ) @@ -2657,7 +2675,7 @@ async def test_sheet_export_audience_list_flattened_async(): @pytest.mark.asyncio -async def test_sheet_export_audience_list_flattened_error_async(): +async def test_get_audience_list_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) @@ -2665,8 +2683,8 @@ async def test_sheet_export_audience_list_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.sheet_export_audience_list( - analytics_data_api.SheetExportAudienceListRequest(), + await client.get_audience_list( + analytics_data_api.GetAudienceListRequest(), name="name_value", ) @@ -2674,11 +2692,11 @@ async def test_sheet_export_audience_list_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetAudienceListRequest, + analytics_data_api.ListAudienceListsRequest, dict, ], ) -def test_get_audience_list(request_type, transport: str = "grpc"): +def test_list_audience_lists(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -2690,42 +2708,26 @@ def test_get_audience_list(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_audience_list), "__call__" + type(client.transport.list_audience_lists), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.AudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - state=analytics_data_api.AudienceList.State.CREATING, - creation_quota_tokens_charged=3070, - row_count=992, - error_message="error_message_value", - percentage_completed=0.2106, - recurring_audience_list="recurring_audience_list_value", + call.return_value = analytics_data_api.ListAudienceListsResponse( + next_page_token="next_page_token_value", ) - response = client.get_audience_list(request) + response = client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.AudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.state == analytics_data_api.AudienceList.State.CREATING - assert response.creation_quota_tokens_charged == 3070 - assert response.row_count == 992 - assert response.error_message == "error_message_value" - assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) - assert response.recurring_audience_list == "recurring_audience_list_value" + assert isinstance(response, pagers.ListAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_get_audience_list_non_empty_request_with_auto_populated_field(): +def test_list_audience_lists_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -2736,26 +2738,28 @@ def test_get_audience_list_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.GetAudienceListRequest( - name="name_value", + request = analytics_data_api.ListAudienceListsRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_audience_list), "__call__" + type(client.transport.list_audience_lists), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.get_audience_list(request=request) + client.list_audience_lists(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetAudienceListRequest( - name="name_value", + assert args[0] == analytics_data_api.ListAudienceListsRequest( + parent="parent_value", + page_token="page_token_value", ) -def test_get_audience_list_use_cached_wrapped_rpc(): +def test_list_audience_lists_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -2769,23 +2773,25 @@ def test_get_audience_list_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_audience_list in client._transport._wrapped_methods + assert ( + client._transport.list_audience_lists in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_audience_list] = ( + client._transport._wrapped_methods[client._transport.list_audience_lists] = ( mock_rpc ) request = {} - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_audience_list(request) + client.list_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2793,7 +2799,7 @@ def test_get_audience_list_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_audience_list_async_use_cached_wrapped_rpc( +async def test_list_audience_lists_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -2810,7 +2816,7 @@ async def test_get_audience_list_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_audience_list + client._client._transport.list_audience_lists in client._client._transport._wrapped_methods ) @@ -2818,16 +2824,16 @@ async def test_get_audience_list_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_audience_list + client._client._transport.list_audience_lists ] = mock_rpc request = {} - await client.get_audience_list(request) + await client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_audience_list(request) + await client.list_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -2835,9 +2841,9 @@ async def test_get_audience_list_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_audience_list_async( +async def test_list_audience_lists_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.GetAudienceListRequest, + request_type=analytics_data_api.ListAudienceListsRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -2850,65 +2856,49 @@ async def test_get_audience_list_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_audience_list), "__call__" + type(client.transport.list_audience_lists), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.AudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - state=analytics_data_api.AudienceList.State.CREATING, - creation_quota_tokens_charged=3070, - row_count=992, - error_message="error_message_value", - percentage_completed=0.2106, - recurring_audience_list="recurring_audience_list_value", + analytics_data_api.ListAudienceListsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.get_audience_list(request) + response = await client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.AudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.state == analytics_data_api.AudienceList.State.CREATING - assert response.creation_quota_tokens_charged == 3070 - assert response.row_count == 992 - assert response.error_message == "error_message_value" - assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) - assert response.recurring_audience_list == "recurring_audience_list_value" + assert isinstance(response, pagers.ListAudienceListsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_audience_list_async_from_dict(): - await test_get_audience_list_async(request_type=dict) +async def test_list_audience_lists_async_from_dict(): + await test_list_audience_lists_async(request_type=dict) -def test_get_audience_list_field_headers(): +def test_list_audience_lists_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_audience_list), "__call__" + type(client.transport.list_audience_lists), "__call__" ) as call: - call.return_value = analytics_data_api.AudienceList() - client.get_audience_list(request) + call.return_value = analytics_data_api.ListAudienceListsResponse() + client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -2919,30 +2909,30 @@ def test_get_audience_list_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_audience_list_field_headers_async(): +async def test_list_audience_lists_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_audience_list), "__call__" + type(client.transport.list_audience_lists), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.AudienceList() + analytics_data_api.ListAudienceListsResponse() ) - await client.get_audience_list(request) + await client.list_audience_lists(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -2953,37 +2943,37 @@ async def test_get_audience_list_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_get_audience_list_flattened(): +def test_list_audience_lists_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_audience_list), "__call__" + type(client.transport.list_audience_lists), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.AudienceList() + call.return_value = analytics_data_api.ListAudienceListsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_audience_list( - name="name_value", + client.list_audience_lists( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_get_audience_list_flattened_error(): +def test_list_audience_lists_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -2991,45 +2981,621 @@ def test_get_audience_list_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_audience_list( - analytics_data_api.GetAudienceListRequest(), - name="name_value", + client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_get_audience_list_flattened_async(): +async def test_list_audience_lists_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.ListAudienceListsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListAudienceListsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_audience_lists( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_audience_lists_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_audience_lists( + analytics_data_api.ListAudienceListsRequest(), + parent="parent_value", + ) + + +def test_list_audience_lists_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_audience_lists(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) + + +def test_list_audience_lists_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + RuntimeError, + ) + pages = list(client.list_audience_lists(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_audience_lists_async_pager(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_audience_lists( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, analytics_data_api.AudienceList) for i in responses) + + +@pytest.mark.asyncio +async def test_list_audience_lists_async_pages(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_audience_lists), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListAudienceListsResponse( + audience_lists=[ + analytics_data_api.AudienceList(), + analytics_data_api.AudienceList(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_audience_lists(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.CreateRecurringAudienceListRequest, + dict, + ], +) +def test_create_recurring_audience_list(request_type, transport: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], + ) + response = client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateRecurringAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] + + +def test_create_recurring_audience_list_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_data_api.CreateRecurringAudienceListRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_recurring_audience_list(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_data_api.CreateRecurringAudienceListRequest( + parent="parent_value", + ) + + +def test_create_recurring_audience_list_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_recurring_audience_list + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_recurring_audience_list + ] = mock_rpc + request = {} + client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_recurring_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_recurring_audience_list + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_recurring_audience_list + ] = mock_rpc + + request = {} + await client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_recurring_audience_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_async( + transport: str = "grpc_asyncio", + request_type=analytics_data_api.CreateRecurringAudienceListRequest, +): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), + transport=transport, ) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_audience_list), "__call__" + type(client.transport.create_recurring_audience_list), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.AudienceList() + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], + ) + ) + response = await client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_data_api.CreateRecurringAudienceListRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] + + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_async_from_dict(): + await test_create_recurring_audience_list_async(request_type=dict) + + +def test_create_recurring_audience_list_field_headers(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateRecurringAudienceListRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_recurring_audience_list), "__call__" + ) as call: + call.return_value = analytics_data_api.RecurringAudienceList() + client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_field_headers_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_data_api.CreateRecurringAudienceListRequest() + + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), "__call__" + ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.AudienceList() + analytics_data_api.RecurringAudienceList() ) + await client.create_recurring_audience_list(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_recurring_audience_list_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_recurring_audience_list), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.RecurringAudienceList() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_audience_list( - name="name_value", + client.create_recurring_audience_list( + parent="parent_value", + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].recurring_audience_list + mock_val = analytics_data_api.RecurringAudienceList(name="name_value") + assert arg == mock_val + + +def test_create_recurring_audience_list_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_recurring_audience_list( + analytics_data_api.CreateRecurringAudienceListRequest(), + parent="parent_value", + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" + ), + ) + + +@pytest.mark.asyncio +async def test_create_recurring_audience_list_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_recurring_audience_list), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.RecurringAudienceList() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.RecurringAudienceList() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_recurring_audience_list( + parent="parent_value", + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" + ), ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].recurring_audience_list + mock_val = analytics_data_api.RecurringAudienceList(name="name_value") assert arg == mock_val @pytest.mark.asyncio -async def test_get_audience_list_flattened_error_async(): +async def test_create_recurring_audience_list_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) @@ -3037,20 +3603,23 @@ async def test_get_audience_list_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - await client.get_audience_list( - analytics_data_api.GetAudienceListRequest(), - name="name_value", + await client.create_recurring_audience_list( + analytics_data_api.CreateRecurringAudienceListRequest(), + parent="parent_value", + recurring_audience_list=analytics_data_api.RecurringAudienceList( + name="name_value" + ), ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListAudienceListsRequest, + analytics_data_api.GetRecurringAudienceListRequest, dict, ], ) -def test_list_audience_lists(request_type, transport: str = "grpc"): +def test_get_recurring_audience_list(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3062,26 +3631,34 @@ def test_list_audience_lists(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" + type(client.transport.get_recurring_audience_list), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListAudienceListsResponse( - next_page_token="next_page_token_value", + call.return_value = analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], ) - response = client.list_audience_lists(request) + response = client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListAudienceListsRequest() + request = analytics_data_api.GetRecurringAudienceListRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] -def test_list_audience_lists_non_empty_request_with_auto_populated_field(): +def test_get_recurring_audience_list_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -3092,28 +3669,26 @@ def test_list_audience_lists_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.ListAudienceListsRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_data_api.GetRecurringAudienceListRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" + type(client.transport.get_recurring_audience_list), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_audience_lists(request=request) + client.get_recurring_audience_list(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListAudienceListsRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_data_api.GetRecurringAudienceListRequest( + name="name_value", ) -def test_list_audience_lists_use_cached_wrapped_rpc(): +def test_get_recurring_audience_list_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3128,7 +3703,8 @@ def test_list_audience_lists_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_audience_lists in client._transport._wrapped_methods + client._transport.get_recurring_audience_list + in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -3136,16 +3712,16 @@ def test_list_audience_lists_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_audience_lists] = ( - mock_rpc - ) + client._transport._wrapped_methods[ + client._transport.get_recurring_audience_list + ] = mock_rpc request = {} - client.list_audience_lists(request) + client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_audience_lists(request) + client.get_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3153,7 +3729,7 @@ def test_list_audience_lists_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_audience_lists_async_use_cached_wrapped_rpc( +async def test_get_recurring_audience_list_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3170,7 +3746,7 @@ async def test_list_audience_lists_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_audience_lists + client._client._transport.get_recurring_audience_list in client._client._transport._wrapped_methods ) @@ -3178,16 +3754,16 @@ async def test_list_audience_lists_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_audience_lists + client._client._transport.get_recurring_audience_list ] = mock_rpc request = {} - await client.list_audience_lists(request) + await client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_audience_lists(request) + await client.get_recurring_audience_list(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3195,9 +3771,9 @@ async def test_list_audience_lists_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_audience_lists_async( +async def test_get_recurring_audience_list_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.ListAudienceListsRequest, + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -3210,49 +3786,57 @@ async def test_list_audience_lists_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" + type(client.transport.get_recurring_audience_list), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListAudienceListsResponse( - next_page_token="next_page_token_value", + analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], ) ) - response = await client.list_audience_lists(request) + response = await client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListAudienceListsRequest() + request = analytics_data_api.GetRecurringAudienceListRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAudienceListsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] @pytest.mark.asyncio -async def test_list_audience_lists_async_from_dict(): - await test_list_audience_lists_async(request_type=dict) +async def test_get_recurring_audience_list_async_from_dict(): + await test_get_recurring_audience_list_async(request_type=dict) -def test_list_audience_lists_field_headers(): +def test_get_recurring_audience_list_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListAudienceListsRequest() + request = analytics_data_api.GetRecurringAudienceListRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" + type(client.transport.get_recurring_audience_list), "__call__" ) as call: - call.return_value = analytics_data_api.ListAudienceListsResponse() - client.list_audience_lists(request) + call.return_value = analytics_data_api.RecurringAudienceList() + client.get_recurring_audience_list(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3263,340 +3847,138 @@ def test_list_audience_lists_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_audience_lists_field_headers_async(): +async def test_get_recurring_audience_list_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListAudienceListsRequest() - - request.parent = "parent_value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListAudienceListsResponse() - ) - await client.list_audience_lists(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - "x-goog-request-params", - "parent=parent_value", - ) in kw["metadata"] - - -def test_list_audience_lists_flattened(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListAudienceListsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_audience_lists( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_audience_lists_flattened_error(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_audience_lists( - analytics_data_api.ListAudienceListsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_audience_lists_flattened_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListAudienceListsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListAudienceListsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_audience_lists( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_audience_lists_flattened_error_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_audience_lists( - analytics_data_api.ListAudienceListsRequest(), - parent="parent_value", - ) - - -def test_list_audience_lists_pager(transport_name: str = "grpc"): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) + request = analytics_data_api.GetRecurringAudienceListRequest() - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - ), - RuntimeError, - ) + request.name = "name_value" - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_recurring_audience_list), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.RecurringAudienceList() ) - pager = client.list_audience_lists(request={}, retry=retry, timeout=timeout) + await client.get_recurring_audience_list(request) - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.AudienceList) for i in results) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_audience_lists_pages(transport_name: str = "grpc"): +def test_get_recurring_audience_list_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_audience_lists), "__call__" + type(client.transport.get_recurring_audience_list), "__call__" ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - ), - RuntimeError, + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.RecurringAudienceList() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_recurring_audience_list( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_recurring_audience_list_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_recurring_audience_list( + analytics_data_api.GetRecurringAudienceListRequest(), + name="name_value", ) - pages = list(client.list_audience_lists(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_audience_lists_async_pager(): +async def test_get_recurring_audience_list_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_audience_lists), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.get_recurring_audience_list), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - ), - RuntimeError, + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.RecurringAudienceList() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.RecurringAudienceList() ) - async_pager = await client.list_audience_lists( - request={}, + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_recurring_audience_list( + name="name_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, analytics_data_api.AudienceList) for i in responses) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_list_audience_lists_async_pages(): +async def test_get_recurring_audience_list_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_audience_lists), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListAudienceListsResponse( - audience_lists=[ - analytics_data_api.AudienceList(), - analytics_data_api.AudienceList(), - ], - ), - RuntimeError, + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_recurring_audience_list( + analytics_data_api.GetRecurringAudienceListRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_audience_lists(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateRecurringAudienceListRequest, + analytics_data_api.ListRecurringAudienceListsRequest, dict, ], ) -def test_create_recurring_audience_list(request_type, transport: str = "grpc"): +def test_list_recurring_audience_lists(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3608,34 +3990,26 @@ def test_create_recurring_audience_list(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_recurring_audience_list), "__call__" + type(client.transport.list_recurring_audience_lists), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.RecurringAudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], + call.return_value = analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token="next_page_token_value", ) - response = client.create_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.RecurringAudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] + assert isinstance(response, pagers.ListRecurringAudienceListsPager) + assert response.next_page_token == "next_page_token_value" -def test_create_recurring_audience_list_non_empty_request_with_auto_populated_field(): +def test_list_recurring_audience_lists_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -3646,26 +4020,28 @@ def test_create_recurring_audience_list_non_empty_request_with_auto_populated_fi # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.CreateRecurringAudienceListRequest( + request = analytics_data_api.ListRecurringAudienceListsRequest( parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_recurring_audience_list), "__call__" + type(client.transport.list_recurring_audience_lists), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_recurring_audience_list(request=request) + client.list_recurring_audience_lists(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateRecurringAudienceListRequest( + assert args[0] == analytics_data_api.ListRecurringAudienceListsRequest( parent="parent_value", + page_token="page_token_value", ) -def test_create_recurring_audience_list_use_cached_wrapped_rpc(): +def test_list_recurring_audience_lists_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3680,7 +4056,7 @@ def test_create_recurring_audience_list_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.create_recurring_audience_list + client._transport.list_recurring_audience_lists in client._transport._wrapped_methods ) @@ -3690,15 +4066,15 @@ def test_create_recurring_audience_list_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.create_recurring_audience_list + client._transport.list_recurring_audience_lists ] = mock_rpc request = {} - client.create_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.create_recurring_audience_list(request) + client.list_recurring_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3706,7 +4082,7 @@ def test_create_recurring_audience_list_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_recurring_audience_list_async_use_cached_wrapped_rpc( +async def test_list_recurring_audience_lists_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -3723,7 +4099,7 @@ async def test_create_recurring_audience_list_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_recurring_audience_list + client._client._transport.list_recurring_audience_lists in client._client._transport._wrapped_methods ) @@ -3731,16 +4107,16 @@ async def test_create_recurring_audience_list_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_recurring_audience_list + client._client._transport.list_recurring_audience_lists ] = mock_rpc request = {} - await client.create_recurring_audience_list(request) + await client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.create_recurring_audience_list(request) + await client.list_recurring_audience_lists(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -3748,9 +4124,9 @@ async def test_create_recurring_audience_list_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_recurring_audience_list_async( +async def test_list_recurring_audience_lists_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.CreateRecurringAudienceListRequest, + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -3763,57 +4139,49 @@ async def test_create_recurring_audience_list_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_recurring_audience_list), "__call__" + type(client.transport.list_recurring_audience_lists), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RecurringAudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], + analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token="next_page_token_value", ) ) - response = await client.create_recurring_audience_list(request) + response = await client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RecurringAudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] + assert isinstance(response, pagers.ListRecurringAudienceListsAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_create_recurring_audience_list_async_from_dict(): - await test_create_recurring_audience_list_async(request_type=dict) +async def test_list_recurring_audience_lists_async_from_dict(): + await test_list_recurring_audience_lists_async(request_type=dict) -def test_create_recurring_audience_list_field_headers(): +def test_list_recurring_audience_lists_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_recurring_audience_list), "__call__" + type(client.transport.list_recurring_audience_lists), "__call__" ) as call: - call.return_value = analytics_data_api.RecurringAudienceList() - client.create_recurring_audience_list(request) + call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -3829,25 +4197,25 @@ def test_create_recurring_audience_list_field_headers(): @pytest.mark.asyncio -async def test_create_recurring_audience_list_field_headers_async(): +async def test_list_recurring_audience_lists_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_recurring_audience_list), "__call__" + type(client.transport.list_recurring_audience_lists), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RecurringAudienceList() + analytics_data_api.ListRecurringAudienceListsResponse() ) - await client.create_recurring_audience_list(request) + await client.list_recurring_audience_lists(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -3862,24 +4230,21 @@ async def test_create_recurring_audience_list_field_headers_async(): ) in kw["metadata"] -def test_create_recurring_audience_list_flattened(): +def test_list_recurring_audience_lists_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_recurring_audience_list), "__call__" + type(client.transport.list_recurring_audience_lists), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.RecurringAudienceList() + call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_recurring_audience_list( + client.list_recurring_audience_lists( parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" - ), ) # Establish that the underlying call was made with the expected @@ -3889,12 +4254,9 @@ def test_create_recurring_audience_list_flattened(): arg = args[0].parent mock_val = "parent_value" assert arg == mock_val - arg = args[0].recurring_audience_list - mock_val = analytics_data_api.RecurringAudienceList(name="name_value") - assert arg == mock_val -def test_create_recurring_audience_list_flattened_error(): +def test_list_recurring_audience_lists_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -3902,78 +4264,274 @@ def test_create_recurring_audience_list_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_recurring_audience_list( - analytics_data_api.CreateRecurringAudienceListRequest(), + client.list_recurring_audience_lists( + analytics_data_api.ListRecurringAudienceListsRequest(), parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" + ) + + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.ListRecurringAudienceListsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_recurring_audience_lists( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_recurring_audience_lists( + analytics_data_api.ListRecurringAudienceListsRequest(), + parent="parent_value", + ) + + +def test_list_recurring_audience_lists_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_recurring_audience_lists( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, analytics_data_api.RecurringAudienceList) for i in results + ) + + +def test_list_recurring_audience_lists_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + RuntimeError, + ) + pages = list(client.list_recurring_audience_lists(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_recurring_audience_lists_async_pager(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_recurring_audience_lists), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="abc", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="ghi", ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_recurring_audience_lists( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, analytics_data_api.RecurringAudienceList) for i in responses ) @pytest.mark.asyncio -async def test_create_recurring_audience_list_flattened_async(): +async def test_list_recurring_audience_lists_async_pages(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.create_recurring_audience_list), "__call__" + type(client.transport.list_recurring_audience_lists), + "__call__", + new_callable=mock.AsyncMock, ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.RecurringAudienceList() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RecurringAudienceList() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_recurring_audience_list( - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="abc", ), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].recurring_audience_list - mock_val = analytics_data_api.RecurringAudienceList(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_recurring_audience_list_flattened_error_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_recurring_audience_list( - analytics_data_api.CreateRecurringAudienceListRequest(), - parent="parent_value", - recurring_audience_list=analytics_data_api.RecurringAudienceList( - name="name_value" + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[], + next_page_token="def", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListRecurringAudienceListsResponse( + recurring_audience_lists=[ + analytics_data_api.RecurringAudienceList(), + analytics_data_api.RecurringAudienceList(), + ], ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_recurring_audience_lists(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetRecurringAudienceListRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def test_get_recurring_audience_list(request_type, transport: str = "grpc"): +def test_get_property_quotas_snapshot(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3985,34 +4543,26 @@ def test_get_recurring_audience_list(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.get_recurring_audience_list), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.RecurringAudienceList( + call.return_value = analytics_data_api.PropertyQuotasSnapshot( name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], ) - response = client.get_recurring_audience_list(request) + response = client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] -def test_get_recurring_audience_list_non_empty_request_with_auto_populated_field(): +def test_get_property_quotas_snapshot_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. 
client = AlphaAnalyticsDataClient( @@ -4023,26 +4573,26 @@ def test_get_recurring_audience_list_non_empty_request_with_auto_populated_field # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.GetRecurringAudienceListRequest( + request = analytics_data_api.GetPropertyQuotasSnapshotRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_recurring_audience_list), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_recurring_audience_list(request=request) + client.get_property_quotas_snapshot(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetRecurringAudienceListRequest( + assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest( name="name_value", ) -def test_get_recurring_audience_list_use_cached_wrapped_rpc(): +def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4057,7 +4607,7 @@ def test_get_recurring_audience_list_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_recurring_audience_list + client._transport.get_property_quotas_snapshot in client._transport._wrapped_methods ) @@ -4067,15 +4617,15 @@ def test_get_recurring_audience_list_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. 
) client._transport._wrapped_methods[ - client._transport.get_recurring_audience_list + client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - client.get_recurring_audience_list(request) + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_recurring_audience_list(request) + client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4083,7 +4633,7 @@ def test_get_recurring_audience_list_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_recurring_audience_list_async_use_cached_wrapped_rpc( +async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4100,7 +4650,7 @@ async def test_get_recurring_audience_list_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_recurring_audience_list + client._client._transport.get_property_quotas_snapshot in client._client._transport._wrapped_methods ) @@ -4108,16 +4658,16 @@ async def test_get_recurring_audience_list_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_recurring_audience_list + client._client._transport.get_property_quotas_snapshot ] = mock_rpc request = {} - await client.get_recurring_audience_list(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_recurring_audience_list(request) + await client.get_property_quotas_snapshot(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4125,9 +4675,9 @@ async def test_get_recurring_audience_list_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_recurring_audience_list_async( +async def test_get_property_quotas_snapshot_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.GetRecurringAudienceListRequest, + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -4140,57 +4690,49 @@ async def test_get_recurring_audience_list_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_recurring_audience_list), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RecurringAudienceList( + analytics_data_api.PropertyQuotasSnapshot( name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], ) ) - response = await client.get_recurring_audience_list(request) + response = await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] @pytest.mark.asyncio -async def test_get_recurring_audience_list_async_from_dict(): - await test_get_recurring_audience_list_async(request_type=dict) +async def test_get_property_quotas_snapshot_async_from_dict(): + await test_get_property_quotas_snapshot_async(request_type=dict) -def test_get_recurring_audience_list_field_headers(): +def test_get_property_quotas_snapshot_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_recurring_audience_list), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: - call.return_value = analytics_data_api.RecurringAudienceList() - client.get_recurring_audience_list(request) + call.return_value = analytics_data_api.PropertyQuotasSnapshot() + client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. 
assert len(call.mock_calls) == 1 @@ -4206,25 +4748,25 @@ def test_get_recurring_audience_list_field_headers(): @pytest.mark.asyncio -async def test_get_recurring_audience_list_field_headers_async(): +async def test_get_property_quotas_snapshot_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_recurring_audience_list), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RecurringAudienceList() + analytics_data_api.PropertyQuotasSnapshot() ) - await client.get_recurring_audience_list(request) + await client.get_property_quotas_snapshot(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4239,20 +4781,20 @@ async def test_get_recurring_audience_list_field_headers_async(): ) in kw["metadata"] -def test_get_recurring_audience_list_flattened(): +def test_get_property_quotas_snapshot_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_recurring_audience_list), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.RecurringAudienceList() + call.return_value = analytics_data_api.PropertyQuotasSnapshot() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_recurring_audience_list( + client.get_property_quotas_snapshot( name="name_value", ) @@ -4265,7 +4807,7 @@ def test_get_recurring_audience_list_flattened(): assert arg == mock_val -def test_get_recurring_audience_list_flattened_error(): +def test_get_property_quotas_snapshot_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -4273,31 +4815,31 @@ def test_get_recurring_audience_list_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_recurring_audience_list( - analytics_data_api.GetRecurringAudienceListRequest(), + client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_recurring_audience_list_flattened_async(): +async def test_get_property_quotas_snapshot_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_recurring_audience_list), "__call__" + type(client.transport.get_property_quotas_snapshot), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.RecurringAudienceList() + call.return_value = analytics_data_api.PropertyQuotasSnapshot() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.RecurringAudienceList() + analytics_data_api.PropertyQuotasSnapshot() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_recurring_audience_list( + response = await client.get_property_quotas_snapshot( name="name_value", ) @@ -4311,7 +4853,7 @@ async def test_get_recurring_audience_list_flattened_async(): @pytest.mark.asyncio -async def test_get_recurring_audience_list_flattened_error_async(): +async def test_get_property_quotas_snapshot_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) @@ -4319,8 +4861,8 @@ async def test_get_recurring_audience_list_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_recurring_audience_list( - analytics_data_api.GetRecurringAudienceListRequest(), + await client.get_property_quotas_snapshot( + analytics_data_api.GetPropertyQuotasSnapshotRequest(), name="name_value", ) @@ -4328,11 +4870,11 @@ async def test_get_recurring_audience_list_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListRecurringAudienceListsRequest, + analytics_data_api.CreateReportTaskRequest, dict, ], ) -def test_list_recurring_audience_lists(request_type, transport: str = "grpc"): +def test_create_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4344,26 +4886,23 @@ def test_list_recurring_audience_lists(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.ListRecurringAudienceListsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_recurring_audience_lists(request) + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListRecurringAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) -def test_list_recurring_audience_lists_non_empty_request_with_auto_populated_field(): +def test_create_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -4374,28 +4913,26 @@ def test_list_recurring_audience_lists_non_empty_request_with_auto_populated_fie # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.ListRecurringAudienceListsRequest( + request = analytics_data_api.CreateReportTaskRequest( parent="parent_value", - page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_recurring_audience_lists(request=request) + client.create_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListRecurringAudienceListsRequest( + assert args[0] == analytics_data_api.CreateReportTaskRequest( parent="parent_value", - page_token="page_token_value", ) -def test_list_recurring_audience_lists_use_cached_wrapped_rpc(): +def test_create_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4410,8 +4947,7 @@ def test_list_recurring_audience_lists_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.list_recurring_audience_lists - in client._transport._wrapped_methods + client._transport.create_report_task in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -4419,16 +4955,21 @@ def test_list_recurring_audience_lists_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_recurring_audience_lists - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_report_task] = ( + mock_rpc + ) request = {} - client.list_recurring_audience_lists(request) + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_recurring_audience_lists(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4436,7 +4977,7 @@ def test_list_recurring_audience_lists_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_recurring_audience_lists_async_use_cached_wrapped_rpc( +async def test_create_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -4453,7 +4994,7 @@ async def test_list_recurring_audience_lists_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_recurring_audience_lists + client._client._transport.create_report_task in client._client._transport._wrapped_methods ) @@ -4461,16 +5002,21 @@ async def test_list_recurring_audience_lists_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_recurring_audience_lists + client._client._transport.create_report_task ] = mock_rpc request = {} - await client.list_recurring_audience_lists(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - await client.list_recurring_audience_lists(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4478,9 +5024,9 @@ async def test_list_recurring_audience_lists_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_recurring_audience_lists_async( +async def test_create_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.ListRecurringAudienceListsRequest, + request_type=analytics_data_api.CreateReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -4493,49 +5039,46 @@ async def test_list_recurring_audience_lists_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListRecurringAudienceListsResponse( - next_page_token="next_page_token_value", - ) + operations_pb2.Operation(name="operations/spam") ) - response = await client.list_recurring_audience_lists(request) + response = await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.CreateReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRecurringAudienceListsAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, future.Future) @pytest.mark.asyncio -async def test_list_recurring_audience_lists_async_from_dict(): - await test_list_recurring_audience_lists_async(request_type=dict) +async def test_create_report_task_async_from_dict(): + await test_create_report_task_async(request_type=dict) -def test_list_recurring_audience_lists_field_headers(): +def test_create_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.CreateReportTaskRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: - call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() - client.list_recurring_audience_lists(request) + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -4551,25 +5094,25 @@ def test_list_recurring_audience_lists_field_headers(): @pytest.mark.asyncio -async def test_list_recurring_audience_lists_field_headers_async(): +async def test_create_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.CreateReportTaskRequest() request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" + type(client.transport.create_report_task), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListRecurringAudienceListsResponse() + operations_pb2.Operation(name="operations/op") ) - await client.list_recurring_audience_lists(request) + await client.create_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -4584,308 +5127,110 @@ async def test_list_recurring_audience_lists_field_headers_async(): ) in kw["metadata"] -def test_list_recurring_audience_lists_flattened(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_recurring_audience_lists( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_recurring_audience_lists_flattened_error(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_recurring_audience_lists( - analytics_data_api.ListRecurringAudienceListsRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_recurring_audience_lists_flattened_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListRecurringAudienceListsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListRecurringAudienceListsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_recurring_audience_lists( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_recurring_audience_lists_flattened_error_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_recurring_audience_lists( - analytics_data_api.ListRecurringAudienceListsRequest(), - parent="parent_value", - ) - - -def test_list_recurring_audience_lists_pager(transport_name: str = "grpc"): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_recurring_audience_lists( - request={}, retry=retry, timeout=timeout - ) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all( - isinstance(i, analytics_data_api.RecurringAudienceList) for i in results - ) - - -def test_list_recurring_audience_lists_pages(transport_name: str = "grpc"): +def test_create_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_recurring_audience_lists), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - ), - RuntimeError, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_report_task), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") + assert arg == mock_val + + +def test_create_report_task_flattened_error(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) - pages = list(client.list_recurring_audience_lists(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.asyncio -async def test_list_recurring_audience_lists_async_pager(): +async def test_create_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.list_recurring_audience_lists), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.create_report_task), "__call__" ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - ), - RuntimeError, + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") ) - async_pager = await client.list_recurring_audience_lists( - request={}, + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_report_task( + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all( - isinstance(i, analytics_data_api.RecurringAudienceList) for i in responses - ) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].report_task + mock_val = analytics_data_api.ReportTask(name="name_value") + assert arg == mock_val @pytest.mark.asyncio -async def test_list_recurring_audience_lists_async_pages(): +async def test_create_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_recurring_audience_lists), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="abc", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[], - next_page_token="def", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListRecurringAudienceListsResponse( - recurring_audience_lists=[ - analytics_data_api.RecurringAudienceList(), - analytics_data_api.RecurringAudienceList(), - ], - ), - RuntimeError, + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_report_task( + analytics_data_api.CreateReportTaskRequest(), + parent="parent_value", + report_task=analytics_data_api.ReportTask(name="name_value"), ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_recurring_audience_lists(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetPropertyQuotasSnapshotRequest, + analytics_data_api.QueryReportTaskRequest, dict, ], ) -def test_get_property_quotas_snapshot(request_type, transport: str = "grpc"): +def test_query_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -4897,26 +5242,26 @@ def 
test_get_property_quotas_snapshot(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_property_quotas_snapshot), "__call__" + type(client.transport.query_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.PropertyQuotasSnapshot( - name="name_value", + call.return_value = analytics_data_api.QueryReportTaskResponse( + row_count=992, ) - response = client.get_property_quotas_snapshot(request) + response = client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 -def test_get_property_quotas_snapshot_non_empty_request_with_auto_populated_field(): +def test_query_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -4927,26 +5272,26 @@ def test_get_property_quotas_snapshot_non_empty_request_with_auto_populated_fiel # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. 
- request = analytics_data_api.GetPropertyQuotasSnapshotRequest( + request = analytics_data_api.QueryReportTaskRequest( name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_property_quotas_snapshot), "__call__" + type(client.transport.query_report_task), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_property_quotas_snapshot(request=request) + client.query_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetPropertyQuotasSnapshotRequest( + assert args[0] == analytics_data_api.QueryReportTaskRequest( name="name_value", ) -def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): +def test_query_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -4960,26 +5305,23 @@ def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_property_quotas_snapshot - in client._transport._wrapped_methods - ) + assert client._transport.query_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.get_property_quotas_snapshot - ] = mock_rpc + client._transport._wrapped_methods[client._transport.query_report_task] = ( + mock_rpc + ) request = {} - client.get_property_quotas_snapshot(request) + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_property_quotas_snapshot(request) + client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -4987,7 +5329,7 @@ def test_get_property_quotas_snapshot_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( +async def test_query_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5004,7 +5346,7 @@ async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_property_quotas_snapshot + client._client._transport.query_report_task in client._client._transport._wrapped_methods ) @@ -5012,16 +5354,16 @@ async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_property_quotas_snapshot + client._client._transport.query_report_task ] = mock_rpc request = {} - await client.get_property_quotas_snapshot(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_property_quotas_snapshot(request) + await client.query_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5029,9 +5371,9 @@ async def test_get_property_quotas_snapshot_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_property_quotas_snapshot_async( +async def test_query_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, + request_type=analytics_data_api.QueryReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -5044,49 +5386,49 @@ async def test_get_property_quotas_snapshot_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_property_quotas_snapshot), "__call__" + type(client.transport.query_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.PropertyQuotasSnapshot( - name="name_value", + analytics_data_api.QueryReportTaskResponse( + row_count=992, ) ) - response = await client.get_property_quotas_snapshot(request) + response = await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + request = analytics_data_api.QueryReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 @pytest.mark.asyncio -async def test_get_property_quotas_snapshot_async_from_dict(): - await test_get_property_quotas_snapshot_async(request_type=dict) +async def test_query_report_task_async_from_dict(): + await test_query_report_task_async(request_type=dict) -def test_get_property_quotas_snapshot_field_headers(): +def test_query_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_property_quotas_snapshot), "__call__" + type(client.transport.query_report_task), "__call__" ) as call: - call.return_value = analytics_data_api.PropertyQuotasSnapshot() - client.get_property_quotas_snapshot(request) + call.return_value = analytics_data_api.QueryReportTaskResponse() + client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5102,25 +5444,25 @@ def test_get_property_quotas_snapshot_field_headers(): @pytest.mark.asyncio -async def test_get_property_quotas_snapshot_field_headers_async(): +async def test_query_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + request = analytics_data_api.QueryReportTaskRequest() request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_property_quotas_snapshot), "__call__" + type(client.transport.query_report_task), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.PropertyQuotasSnapshot() + analytics_data_api.QueryReportTaskResponse() ) - await client.get_property_quotas_snapshot(request) + await client.query_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5135,20 +5477,20 @@ async def test_get_property_quotas_snapshot_field_headers_async(): ) in kw["metadata"] -def test_get_property_quotas_snapshot_flattened(): +def test_query_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_property_quotas_snapshot), "__call__" + type(client.transport.query_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.PropertyQuotasSnapshot() + call.return_value = analytics_data_api.QueryReportTaskResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- client.get_property_quotas_snapshot( + client.query_report_task( name="name_value", ) @@ -5161,7 +5503,7 @@ def test_get_property_quotas_snapshot_flattened(): assert arg == mock_val -def test_get_property_quotas_snapshot_flattened_error(): +def test_query_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5169,31 +5511,31 @@ def test_get_property_quotas_snapshot_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_property_quotas_snapshot( - analytics_data_api.GetPropertyQuotasSnapshotRequest(), + client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @pytest.mark.asyncio -async def test_get_property_quotas_snapshot_flattened_async(): +async def test_query_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.get_property_quotas_snapshot), "__call__" + type(client.transport.query_report_task), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.PropertyQuotasSnapshot() + call.return_value = analytics_data_api.QueryReportTaskResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.PropertyQuotasSnapshot() + analytics_data_api.QueryReportTaskResponse() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.get_property_quotas_snapshot( + response = await client.query_report_task( name="name_value", ) @@ -5207,7 +5549,7 @@ async def test_get_property_quotas_snapshot_flattened_async(): @pytest.mark.asyncio -async def test_get_property_quotas_snapshot_flattened_error_async(): +async def test_query_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5215,8 +5557,8 @@ async def test_get_property_quotas_snapshot_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.get_property_quotas_snapshot( - analytics_data_api.GetPropertyQuotasSnapshotRequest(), + await client.query_report_task( + analytics_data_api.QueryReportTaskRequest(), name="name_value", ) @@ -5224,11 +5566,11 @@ async def test_get_property_quotas_snapshot_flattened_error_async(): @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateReportTaskRequest, + analytics_data_api.GetReportTaskRequest, dict, ], ) -def test_create_report_task(request_type, transport: str = "grpc"): +def test_get_report_task(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5239,24 +5581,25 @@ def test_create_report_task(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_report_task), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = operations_pb2.Operation(name="operations/spam") - response = client.create_report_task(request) + call.return_value = analytics_data_api.ReportTask( + name="name_value", + ) + response = client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" -def test_create_report_task_non_empty_request_with_auto_populated_field(): +def test_get_report_task_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5267,26 +5610,24 @@ def test_create_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + request = analytics_data_api.GetReportTaskRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_report_task), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.create_report_task(request=request) + client.get_report_task(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.CreateReportTaskRequest( - parent="parent_value", + assert args[0] == analytics_data_api.GetReportTaskRequest( + name="name_value", ) -def test_create_report_task_use_cached_wrapped_rpc(): +def test_get_report_task_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5300,30 +5641,21 @@ def test_create_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.create_report_task in client._transport._wrapped_methods - ) + assert client._transport.get_report_task in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.create_report_task] = ( - mock_rpc - ) + client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc request = {} - client.create_report_task(request) + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.create_report_task(request) + client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5331,7 +5663,7 @@ def test_create_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_create_report_task_async_use_cached_wrapped_rpc( +async def test_get_report_task_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5348,7 +5680,7 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.create_report_task + client._client._transport.get_report_task in client._client._transport._wrapped_methods ) @@ -5356,21 +5688,16 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.create_report_task + client._client._transport.get_report_task ] = mock_rpc request = {} - await client.create_report_task(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods call wrapper_fn to build a cached - # client._transport.operations_client instance on first rpc call. 
- # Subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - await client.create_report_task(request) + await client.get_report_task(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5378,9 +5705,9 @@ async def test_create_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_create_report_task_async( +async def test_get_report_task_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.CreateReportTaskRequest, + request_type=analytics_data_api.GetReportTaskRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -5392,47 +5719,46 @@ async def test_create_report_task_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_report_task), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.ReportTask( + name="name_value", + ) ) - response = await client.create_report_task(request) + response = await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetReportTaskRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, future.Future) + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_create_report_task_async_from_dict(): - await test_create_report_task_async(request_type=dict) +async def test_get_report_task_async_from_dict(): + await test_get_report_task_async(request_type=dict) -def test_create_report_task_field_headers(): +def test_get_report_task_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_report_task), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_report_task(request) + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + call.return_value = analytics_data_api.ReportTask() + client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5443,30 +5769,28 @@ def test_create_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_create_report_task_field_headers_async(): +async def test_get_report_task_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetReportTaskRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_report_task), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/op") + analytics_data_api.ReportTask() ) - await client.create_report_task(request) + await client.get_report_task(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5477,41 +5801,35 @@ async def test_create_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_create_report_task_flattened(): +def test_get_report_task_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_report_task), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.ReportTask() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -def test_create_report_task_flattened_error(): +def test_get_report_task_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5519,50 +5837,43 @@ def test_create_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) @pytest.mark.asyncio -async def test_create_report_task_flattened_async(): +async def test_get_report_task_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_report_task), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = operations_pb2.Operation(name="operations/op") + call.return_value = analytics_data_api.ReportTask() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + analytics_data_api.ReportTask() ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.create_report_task( - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + response = await client.get_report_task( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].report_task - mock_val = analytics_data_api.ReportTask(name="name_value") + arg = args[0].name + mock_val = "name_value" assert arg == mock_val @pytest.mark.asyncio -async def test_create_report_task_flattened_error_async(): +async def test_get_report_task_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) @@ -5570,21 +5881,20 @@ async def test_create_report_task_flattened_error_async(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - await client.create_report_task( - analytics_data_api.CreateReportTaskRequest(), - parent="parent_value", - report_task=analytics_data_api.ReportTask(name="name_value"), + await client.get_report_task( + analytics_data_api.GetReportTaskRequest(), + name="name_value", ) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryReportTaskRequest, + analytics_data_api.ListReportTasksRequest, dict, ], ) -def test_query_report_task(request_type, transport: str = "grpc"): +def test_list_report_tasks(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5596,26 +5906,26 @@ def test_query_report_task(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.list_report_tasks), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse( - row_count=992, + call.return_value = analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", ) - response = client.query_report_task(request) + response = client.list_report_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.ListReportTasksRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, pagers.ListReportTasksPager) + assert response.next_page_token == "next_page_token_value" -def test_query_report_task_non_empty_request_with_auto_populated_field(): +def test_list_report_tasks_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5626,26 +5936,28 @@ def test_query_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.QueryReportTaskRequest( - name="name_value", + request = analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.list_report_tasks), "__call__" ) as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.query_report_task(request=request) + client.list_report_tasks(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.QueryReportTaskRequest( - name="name_value", + assert args[0] == analytics_data_api.ListReportTasksRequest( + parent="parent_value", + page_token="page_token_value", ) -def test_query_report_task_use_cached_wrapped_rpc(): +def test_list_report_tasks_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5659,23 +5971,23 @@ def test_query_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.query_report_task in client._transport._wrapped_methods + assert client._transport.list_report_tasks in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.query_report_task] = ( + client._transport._wrapped_methods[client._transport.list_report_tasks] = ( mock_rpc ) request = {} - client.query_report_task(request) + client.list_report_tasks(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.query_report_task(request) + client.list_report_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5683,7 +5995,7 @@ def test_query_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_query_report_task_async_use_cached_wrapped_rpc( +async def test_list_report_tasks_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -5700,7 +6012,7 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.query_report_task + client._client._transport.list_report_tasks in client._client._transport._wrapped_methods ) @@ -5708,16 +6020,16 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.query_report_task + client._client._transport.list_report_tasks ] = mock_rpc request = {} - await client.query_report_task(request) + await client.list_report_tasks(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.query_report_task(request) + await client.list_report_tasks(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -5725,9 +6037,9 @@ async def test_query_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_query_report_task_async( +async def test_list_report_tasks_async( transport: str = "grpc_asyncio", - request_type=analytics_data_api.QueryReportTaskRequest, + request_type=analytics_data_api.ListReportTasksRequest, ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -5740,49 +6052,49 @@ async def test_query_report_task_async( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.list_report_tasks), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse( - row_count=992, + analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", ) ) - response = await client.query_report_task(request) + response = await client.list_report_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.ListReportTasksRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, pagers.ListReportTasksAsyncPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_query_report_task_async_from_dict(): - await test_query_report_task_async(request_type=dict) +async def test_list_report_tasks_async_from_dict(): + await test_list_report_tasks_async(request_type=dict) -def test_query_report_task_field_headers(): +def test_list_report_tasks_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.ListReportTasksRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.list_report_tasks), "__call__" ) as call: - call.return_value = analytics_data_api.QueryReportTaskResponse() - client.query_report_task(request) + call.return_value = analytics_data_api.ListReportTasksResponse() + client.list_report_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -5793,30 +6105,30 @@ def test_query_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_query_report_task_field_headers_async(): +async def test_list_report_tasks_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.ListReportTasksRequest() - request.name = "name_value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.list_report_tasks), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + analytics_data_api.ListReportTasksResponse() ) - await client.query_report_task(request) + await client.list_report_tasks(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -5827,37 +6139,37 @@ async def test_query_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "parent=parent_value", ) in kw["metadata"] -def test_query_report_task_flattened(): +def test_list_report_tasks_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.list_report_tasks), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = analytics_data_api.ListReportTasksResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.query_report_task( - name="name_value", + client.list_report_tasks( + parent="parent_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" + arg = args[0].parent + mock_val = "parent_value" assert arg == mock_val -def test_query_report_task_flattened_error(): +def test_list_report_tasks_flattened_error(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) @@ -5865,66 +6177,268 @@ def test_query_report_task_flattened_error(): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", ) @pytest.mark.asyncio -async def test_query_report_task_flattened_async(): +async def test_list_report_tasks_flattened_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client.transport.query_report_task), "__call__" + type(client.transport.list_report_tasks), "__call__" ) as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.QueryReportTaskResponse() + call.return_value = analytics_data_api.ListReportTasksResponse() call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.QueryReportTaskResponse() + analytics_data_api.ListReportTasksResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_report_tasks( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_report_tasks_flattened_error_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_report_tasks( + analytics_data_api.ListReportTasksRequest(), + parent="parent_value", + ) + + +def test_list_report_tasks_pager(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) 
== 6 + assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + + +def test_list_report_tasks_pages(transport_name: str = "grpc"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, + ) + pages = list(client.list_report_tasks(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_report_tasks_async_pager(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_report_tasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.query_report_task( - name="name_value", + async_pager = await client.list_report_tasks( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val + assert len(responses) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) for i in responses) @pytest.mark.asyncio -async def test_query_report_task_flattened_error_async(): +async def test_list_report_tasks_async_pages(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.query_report_task( - analytics_data_api.QueryReportTaskRequest(), - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_report_tasks), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + next_page_token="abc", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[], + next_page_token="def", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + ], + next_page_token="ghi", + ), + analytics_data_api.ListReportTasksResponse( + report_tasks=[ + analytics_data_api.ReportTask(), + analytics_data_api.ReportTask(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_report_tasks(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetReportTaskRequest, + analytics_data_api.RunReportRequest, dict, ], ) -def test_get_report_task(request_type, transport: str = "grpc"): +def test_run_report(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -5935,25 +6449,30 @@ def test_get_report_task(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object(type(client.transport.run_report), "__call__") as call: # Designate an appropriate return value for the call. 
- call.return_value = analytics_data_api.ReportTask( - name="name_value", + call.return_value = analytics_data_api.RunReportResponse( + row_count=992, + kind="kind_value", + next_page_token="next_page_token_value", ) - response = client.get_report_task(request) + response = client.run_report(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.RunReportRequest() assert args[0] == request # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert response.raw_page is response + assert isinstance(response, analytics_data_api.RunReportResponse) + assert response.row_count == 992 + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" -def test_get_report_task_non_empty_request_with_auto_populated_field(): +def test_run_report_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -5964,24 +6483,26 @@ def test_get_report_task_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.GetReportTaskRequest( - name="name_value", + request = analytics_data_api.RunReportRequest( + property="property_value", + currency_code="currency_code_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object(type(client.transport.run_report), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client.get_report_task(request=request) + client.run_report(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.GetReportTaskRequest( - name="name_value", + assert args[0] == analytics_data_api.RunReportRequest( + property="property_value", + currency_code="currency_code_value", ) -def test_get_report_task_use_cached_wrapped_rpc(): +def test_run_report_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -5995,21 +6516,21 @@ def test_get_report_task_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_report_task in client._transport._wrapped_methods + assert client._transport.run_report in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_report_task] = mock_rpc + client._transport._wrapped_methods[client._transport.run_report] = mock_rpc request = {} - client.get_report_task(request) + client.run_report(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_report_task(request) + client.run_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6017,9 +6538,7 @@ def test_get_report_task_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_get_report_task_async_use_cached_wrapped_rpc( - transport: str = "grpc_asyncio", -): +async def test_run_report_async_use_cached_wrapped_rpc(transport: str = "grpc_asyncio"): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: @@ -6034,7 +6553,7 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.get_report_task + client._client._transport.run_report in client._client._transport._wrapped_methods ) @@ -6042,16 +6561,16 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.get_report_task + client._client._transport.run_report ] = mock_rpc request = {} - await client.get_report_task(request) + await client.run_report(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.get_report_task(request) + await client.run_report(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6059,9 +6578,8 @@ async def test_get_report_task_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_get_report_task_async( - transport: str = "grpc_asyncio", - request_type=analytics_data_api.GetReportTaskRequest, +async def test_run_report_async( + transport: str = "grpc_asyncio", request_type=analytics_data_api.RunReportRequest ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -6073,46 +6591,50 @@ async def test_get_report_task_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object(type(client.transport.run_report), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask( - name="name_value", + analytics_data_api.RunReportResponse( + row_count=992, + kind="kind_value", + next_page_token="next_page_token_value", ) ) - response = await client.get_report_task(request) + response = await client.run_report(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.RunReportRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.RunReportResponse) + assert response.row_count == 992 + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" @pytest.mark.asyncio -async def test_get_report_task_async_from_dict(): - await test_get_report_task_async(request_type=dict) +async def test_run_report_async_from_dict(): + await test_run_report_async(request_type=dict) -def test_get_report_task_field_headers(): +def test_run_report_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.RunReportRequest() - request.name = "name_value" + request.property = "property_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: - call.return_value = analytics_data_api.ReportTask() - client.get_report_task(request) + with mock.patch.object(type(client.transport.run_report), "__call__") as call: + call.return_value = analytics_data_api.RunReportResponse() + client.run_report(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6123,28 +6645,28 @@ def test_get_report_task_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "property=property_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_get_report_task_field_headers_async(): +async def test_run_report_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. 
Set these to a non-empty value. - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.RunReportRequest() - request.name = "name_value" + request.property = "property_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: + with mock.patch.object(type(client.transport.run_report), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() + analytics_data_api.RunReportResponse() ) - await client.get_report_task(request) + await client.run_report(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6155,100 +6677,18 @@ async def test_get_report_task_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "name=name_value", + "property=property_value", ) in kw["metadata"] -def test_get_report_task_flattened(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_report_task( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_report_task_flattened_error(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_report_task( - analytics_data_api.GetReportTaskRequest(), - name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_report_task_flattened_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_report_task), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ReportTask() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ReportTask() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_report_task( - name="name_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_report_task_flattened_error_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_report_task( - analytics_data_api.GetReportTaskRequest(), - name="name_value", - ) - - @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListReportTasksRequest, + analytics_data_api.GetMetadataRequest, dict, ], ) -def test_list_report_tasks(request_type, transport: str = "grpc"): +def test_get_metadata(request_type, transport: str = "grpc"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -6259,27 +6699,25 @@ def test_list_report_tasks(request_type, transport: str = "grpc"): request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + call.return_value = analytics_data_api.Metadata( + name="name_value", ) - response = client.list_report_tasks(request) + response = client.get_metadata(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetMetadataRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReportTasksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.Metadata) + assert response.name == "name_value" -def test_list_report_tasks_non_empty_request_with_auto_populated_field(): +def test_get_metadata_non_empty_request_with_auto_populated_field(): # This test is a coverage failsafe to make sure that UUID4 fields are # automatically populated, according to AIP-4235, with non-empty requests. client = AlphaAnalyticsDataClient( @@ -6290,28 +6728,24 @@ def test_list_report_tasks_non_empty_request_with_auto_populated_field(): # Populate all string fields in the request which are not UUID4 # since we want to check that UUID4 are populated automatically # if they meet the requirements of AIP 4235. - request = analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + request = analytics_data_api.GetMetadataRequest( + name="name_value", ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: call.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client.list_report_tasks(request=request) + client.get_metadata(request=request) call.assert_called() _, args, _ = call.mock_calls[0] - assert args[0] == analytics_data_api.ListReportTasksRequest( - parent="parent_value", - page_token="page_token_value", + assert args[0] == analytics_data_api.GetMetadataRequest( + name="name_value", ) -def test_list_report_tasks_use_cached_wrapped_rpc(): +def test_get_metadata_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -6325,23 +6759,21 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_report_tasks in client._transport._wrapped_methods + assert client._transport.get_metadata in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.list_report_tasks] = ( - mock_rpc + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.get_metadata] = mock_rpc request = {} - client.list_report_tasks(request) + client.get_metadata(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_report_tasks(request) + client.get_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6349,7 +6781,7 @@ def test_list_report_tasks_use_cached_wrapped_rpc(): @pytest.mark.asyncio -async def test_list_report_tasks_async_use_cached_wrapped_rpc( +async def test_get_metadata_async_use_cached_wrapped_rpc( transport: str = "grpc_asyncio", ): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, @@ -6366,7 +6798,7 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( # Ensure method has been cached assert ( - client._client._transport.list_report_tasks + client._client._transport.get_metadata in client._client._transport._wrapped_methods ) @@ -6374,16 +6806,16 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( mock_rpc = mock.AsyncMock() mock_rpc.return_value = mock.Mock() client._client._transport._wrapped_methods[ - client._client._transport.list_report_tasks + client._client._transport.get_metadata ] = mock_rpc request = {} - await client.list_report_tasks(request) + await client.get_metadata(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - await client.list_report_tasks(request) + await client.get_metadata(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 @@ -6391,9 +6823,8 @@ async def test_list_report_tasks_async_use_cached_wrapped_rpc( @pytest.mark.asyncio -async def test_list_report_tasks_async( - transport: str = "grpc_asyncio", - request_type=analytics_data_api.ListReportTasksRequest, +async def test_get_metadata_async( + transport: str = "grpc_asyncio", request_type=analytics_data_api.GetMetadataRequest ): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), @@ -6405,50 +6836,46 @@ async def test_list_report_tasks_async( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + analytics_data_api.Metadata( + name="name_value", ) ) - response = await client.list_report_tasks(request) + response = await client.get_metadata(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetMetadataRequest() assert args[0] == request # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReportTasksAsyncPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.Metadata) + assert response.name == "name_value" @pytest.mark.asyncio -async def test_list_report_tasks_async_from_dict(): - await test_list_report_tasks_async(request_type=dict) +async def test_get_metadata_async_from_dict(): + await test_get_metadata_async(request_type=dict) -def test_list_report_tasks_field_headers(): +def test_get_metadata_field_headers(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetMetadataRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: - call.return_value = analytics_data_api.ListReportTasksResponse() - client.list_report_tasks(request) + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: + call.return_value = analytics_data_api.Metadata() + client.get_metadata(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) == 1 @@ -6459,30 +6886,28 @@ def test_list_report_tasks_field_headers(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] @pytest.mark.asyncio -async def test_list_report_tasks_field_headers_async(): +async def test_get_metadata_field_headers_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
- request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetMetadataRequest() - request.parent = "parent_value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() + analytics_data_api.Metadata() ) - await client.list_report_tasks(request) + await client.get_metadata(request) # Establish that the underlying gRPC stub method was called. assert len(call.mock_calls) @@ -6493,296 +6918,90 @@ async def test_list_report_tasks_field_headers_async(): _, _, kw = call.mock_calls[0] assert ( "x-goog-request-params", - "parent=parent_value", + "name=name_value", ) in kw["metadata"] -def test_list_report_tasks_flattened(): +def test_get_metadata_flattened(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), ) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() + call.return_value = analytics_data_api.Metadata() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_report_tasks( - parent="parent_value", + client.get_metadata( + name="name_value", ) # Establish that the underlying call was made with the expected # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_report_tasks_flattened_error(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_report_tasks_flattened_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = analytics_data_api.ListReportTasksResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.ListReportTasksResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_report_tasks( - parent="parent_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" + arg = args[0].name + mock_val = "name_value" assert arg == mock_val -@pytest.mark.asyncio -async def test_list_report_tasks_flattened_error_async(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_report_tasks( - analytics_data_api.ListReportTasksRequest(), - parent="parent_value", - ) - - -def test_list_report_tasks_pager(transport_name: str = "grpc"): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, - ) - - expected_metadata = () - retry = retries.Retry() - timeout = 5 - expected_metadata = tuple(expected_metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_report_tasks(request={}, retry=retry, timeout=timeout) - - assert pager._metadata == expected_metadata - assert pager._retry == retry - assert pager._timeout == timeout - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) - - -def test_list_report_tasks_pages(transport_name: str = "grpc"): +def test_get_metadata_flattened_error(): client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_report_tasks), "__call__" - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, - ) - pages = list(client.list_report_tasks(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_report_tasks_async_pager(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_metadata( + analytics_data_api.GetMetadataRequest(), + name="name_value", ) - async_pager = await client.list_report_tasks( - request={}, + + +@pytest.mark.asyncio +async def test_get_metadata_flattened_async(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = analytics_data_api.Metadata() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.Metadata() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_metadata( + name="name_value", ) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - assert len(responses) == 6 - assert all(isinstance(i, analytics_data_api.ReportTask) for i in responses) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio -async def test_list_report_tasks_async_pages(): +async def test_get_metadata_flattened_error_async(): client = AlphaAnalyticsDataAsyncClient( credentials=async_anonymous_credentials(), ) - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_report_tasks), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - next_page_token="abc", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[], - next_page_token="def", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - ], - next_page_token="ghi", - ), - analytics_data_api.ListReportTasksResponse( - report_tasks=[ - analytics_data_api.ReportTask(), - analytics_data_api.ReportTask(), - ], - ), - RuntimeError, + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_metadata( + analytics_data_api.GetMetadataRequest(), + name="name_value", ) - pages = [] - # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` - # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 - async for page_ in ( # pragma: no branch - await client.list_report_tasks(request={}) - ).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token def test_run_funnel_report_rest_use_cached_wrapped_rpc(): @@ -7163,214 +7382,26 @@ def test_query_audience_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.query_audience_list(**mock_args) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:query" - % client.transport._host, - args[1], - ) - - -def test_query_audience_list_rest_flattened_error(transport: str = "rest"): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.query_audience_list( - analytics_data_api.QueryAudienceListRequest(), - name="name_value", - ) - - -def test_sheet_export_audience_list_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.sheet_export_audience_list - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.sheet_export_audience_list - ] = mock_rpc - - request = {} - client.sheet_export_audience_list(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.sheet_export_audience_list(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_sheet_export_audience_list_rest_required_fields( - request_type=analytics_data_api.SheetExportAudienceListRequest, -): - transport_class = transports.AlphaAnalyticsDataRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["name"] = "name_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).sheet_export_audience_list._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = analytics_data_api.SheetExportAudienceListResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.sheet_export_audience_list(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_sheet_export_audience_list_rest_unset_required_fields(): - transport = transports.AlphaAnalyticsDataRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.sheet_export_audience_list._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) - - -def test_sheet_export_audience_list_rest_flattened(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.SheetExportAudienceListResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"name": "properties/sample1/audienceLists/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.sheet_export_audience_list(**mock_args) + client.query_audience_list(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1alpha/{name=properties/*/audienceLists/*}:exportSheet" + "%s/v1alpha/{name=properties/*/audienceLists/*}:query" % client.transport._host, args[1], ) -def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest"): +def test_query_audience_list_rest_flattened_error(transport: str = "rest"): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -7379,8 +7410,8 @@ def test_sheet_export_audience_list_rest_flattened_error(transport: str = "rest" # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.sheet_export_audience_list( - analytics_data_api.SheetExportAudienceListRequest(), + client.query_audience_list( + analytics_data_api.QueryAudienceListRequest(), name="name_value", ) @@ -9454,15 +9485,312 @@ def test_list_report_tasks_rest_pager(transport: str = "rest"): sample_request = {"parent": "properties/sample1"} - pager = client.list_report_tasks(request=sample_request) + pager = client.list_report_tasks(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) + + pages = list(client.list_report_tasks(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_run_report_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.run_report in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.run_report] = mock_rpc + + request = {} + client.run_report(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.run_report(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_run_report_rest_required_fields( + request_type=analytics_data_api.RunReportRequest, +): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["property"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["property"] = "property_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).run_report._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "property" in jsonified_request + assert jsonified_request["property"] == "property_value" + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.RunReportResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.RunReportResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.run_report(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_run_report_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.run_report._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("property",))) + + +def test_get_metadata_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_metadata in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + 
mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_metadata] = mock_rpc + + request = {} + client.get_metadata(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_metadata(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_metadata_rest_required_fields( + request_type=analytics_data_api.GetMetadataRequest, +): + transport_class = transports.AlphaAnalyticsDataRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_metadata._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = analytics_data_api.Metadata() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.Metadata.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_metadata(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_metadata_rest_unset_required_fields(): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_metadata._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_metadata_rest_flattened(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = analytics_data_api.Metadata() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/metadata"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = analytics_data_api.Metadata.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_metadata(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/metadata}" % client.transport._host, args[1] + ) + - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, analytics_data_api.ReportTask) for i in results) +def test_get_metadata_rest_flattened_error(transport: str = "rest"): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) - pages = list(client.list_report_tasks(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_metadata( + analytics_data_api.GetMetadataRequest(), + name="name_value", + ) def test_credentials_transport_error(): @@ -9640,29 +9968,6 @@ def test_query_audience_list_empty_call_grpc(): assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_sheet_export_audience_list_empty_call_grpc(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" - ) as call: - call.return_value = analytics_data_api.SheetExportAudienceListResponse() - client.sheet_export_audience_list(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analytics_data_api.SheetExportAudienceListRequest() - - assert args[0] == request_msg - - # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_get_audience_list_empty_call_grpc(): @@ -9891,6 +10196,48 @@ def test_list_report_tasks_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_run_report_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_report), "__call__") as call: + call.return_value = analytics_data_api.RunReportResponse() + client.run_report(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_empty_call_grpc(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: + call.return_value = analytics_data_api.Metadata() + client.get_metadata(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetMetadataRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = AlphaAnalyticsDataAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -9990,37 +10337,6 @@ async def test_query_audience_list_empty_call_grpc_asyncio(): assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_sheet_export_audience_list_empty_call_grpc_asyncio(): - client = AlphaAnalyticsDataAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - analytics_data_api.SheetExportAudienceListResponse( - spreadsheet_uri="spreadsheet_uri_value", - spreadsheet_id="spreadsheet_id_value", - row_count=992, - ) - ) - await client.sheet_export_audience_list(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analytics_data_api.SheetExportAudienceListRequest() - - assert args[0] == request_msg - - # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -10323,6 +10639,62 @@ async def test_list_report_tasks_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_run_report_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_report), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.RunReportResponse( + row_count=992, + kind="kind_value", + next_page_token="next_page_token_value", + ) + ) + await client.run_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_metadata_empty_call_grpc_asyncio(): + client = AlphaAnalyticsDataAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + analytics_data_api.Metadata( + name="name_value", + ) + ) + await client.get_metadata(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetMetadataRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = AlphaAnalyticsDataClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -10580,40 +10952,172 @@ def get_message_fields(field): } ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["audience_list"][field])): - del request_init["audience_list"][field][i][subfield] - else: - del request_init["audience_list"][field][subfield] + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") 
+ subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["audience_list"][field])): + del request_init["audience_list"][field][i][subfield] + else: + del request_init["audience_list"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_audience_list(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_audience_list_rest_interceptors(null_interceptor): + transport = transports.AlphaAnalyticsDataRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AlphaAnalyticsDataRestInterceptor(), + ) + client = AlphaAnalyticsDataClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" + ) as post, + mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_audience_list_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "pre_create_audience_list" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = analytics_data_api.CreateAudienceListRequest.pb( + analytics_data_api.CreateAudienceListRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = analytics_data_api.CreateAudienceListRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_audience_list( + request, + metadata=[ + ("key", 
"val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_query_audience_list_rest_bad_request( + request_type=analytics_data_api.QueryAudienceListRequest, +): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/audienceLists/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.query_audience_list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.QueryAudienceListRequest, + dict, + ], +) +def test_query_audience_list_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/audienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.QueryAudienceListResponse( + row_count=992, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_audience_list(request) + response = client.query_audience_list(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert response.row_count == 992 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_audience_list_rest_interceptors(null_interceptor): +def test_query_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10625,23 +11129,22 @@ def test_create_audience_list_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_create_audience_list_with_metadata", + "post_query_audience_list_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, 
"pre_create_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.CreateAudienceListRequest.pb( - analytics_data_api.CreateAudienceListRequest() + pb_message = analytics_data_api.QueryAudienceListRequest.pb( + analytics_data_api.QueryAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -10653,19 +11156,24 @@ def test_create_audience_list_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = analytics_data_api.QueryAudienceListResponse.to_json( + analytics_data_api.QueryAudienceListResponse() + ) req.return_value.content = return_value - request = analytics_data_api.CreateAudienceListRequest() + request = analytics_data_api.QueryAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = analytics_data_api.QueryAudienceListResponse() + post_with_metadata.return_value = ( + analytics_data_api.QueryAudienceListResponse(), + metadata, + ) - client.create_audience_list( + client.query_audience_list( request, metadata=[ ("key", "val"), @@ -10678,8 +11186,8 @@ def test_create_audience_list_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_query_audience_list_rest_bad_request( - request_type=analytics_data_api.QueryAudienceListRequest, +def test_get_audience_list_rest_bad_request( + request_type=analytics_data_api.GetAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ 
-10701,17 +11209,17 @@ def test_query_audience_list_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.query_audience_list(request) + client.get_audience_list(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryAudienceListRequest, + analytics_data_api.GetAudienceListRequest, dict, ], ) -def test_query_audience_list_rest_call_success(request_type): +def test_get_audience_list_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -10723,8 +11231,16 @@ def test_query_audience_list_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.QueryAudienceListResponse( + return_value = analytics_data_api.AudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + state=analytics_data_api.AudienceList.State.CREATING, + creation_quota_tokens_charged=3070, row_count=992, + error_message="error_message_value", + percentage_completed=0.2106, + recurring_audience_list="recurring_audience_list_value", ) # Wrap the value into a proper Response obj @@ -10732,20 +11248,28 @@ def test_query_audience_list_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryAudienceListResponse.pb(return_value) + return_value = analytics_data_api.AudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - 
response = client.query_audience_list(request) + response = client.get_audience_list(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.QueryAudienceListResponse) + assert isinstance(response, analytics_data_api.AudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.state == analytics_data_api.AudienceList.State.CREATING + assert response.creation_quota_tokens_charged == 3070 assert response.row_count == 992 + assert response.error_message == "error_message_value" + assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) + assert response.recurring_audience_list == "recurring_audience_list_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_audience_list_rest_interceptors(null_interceptor): +def test_get_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10758,21 +11282,21 @@ def test_query_audience_list_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_query_audience_list_with_metadata", + "post_get_audience_list_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_query_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = 
analytics_data_api.QueryAudienceListRequest.pb( - analytics_data_api.QueryAudienceListRequest() + pb_message = analytics_data_api.GetAudienceListRequest.pb( + analytics_data_api.GetAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -10784,24 +11308,21 @@ def test_query_audience_list_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.QueryAudienceListResponse.to_json( - analytics_data_api.QueryAudienceListResponse() + return_value = analytics_data_api.AudienceList.to_json( + analytics_data_api.AudienceList() ) req.return_value.content = return_value - request = analytics_data_api.QueryAudienceListRequest() + request = analytics_data_api.GetAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.QueryAudienceListResponse() - post_with_metadata.return_value = ( - analytics_data_api.QueryAudienceListResponse(), - metadata, - ) + post.return_value = analytics_data_api.AudienceList() + post_with_metadata.return_value = analytics_data_api.AudienceList(), metadata - client.query_audience_list( + client.get_audience_list( request, metadata=[ ("key", "val"), @@ -10814,14 +11335,14 @@ def test_query_audience_list_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_sheet_export_audience_list_rest_bad_request( - request_type=analytics_data_api.SheetExportAudienceListRequest, +def test_list_audience_lists_rest_bad_request( + request_type=analytics_data_api.ListAudienceListsRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = 
request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -10837,32 +11358,30 @@ def test_sheet_export_audience_list_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.sheet_export_audience_list(request) + client.list_audience_lists(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.SheetExportAudienceListRequest, + analytics_data_api.ListAudienceListsRequest, dict, ], ) -def test_sheet_export_audience_list_rest_call_success(request_type): +def test_list_audience_lists_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.SheetExportAudienceListResponse( - spreadsheet_uri="spreadsheet_uri_value", - spreadsheet_id="spreadsheet_id_value", - row_count=992, + return_value = analytics_data_api.ListAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -10870,24 +11389,20 @@ def test_sheet_export_audience_list_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.SheetExportAudienceListResponse.pb( - return_value - ) + return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.sheet_export_audience_list(request) + response = client.list_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.SheetExportAudienceListResponse) - assert response.spreadsheet_uri == "spreadsheet_uri_value" - assert response.spreadsheet_id == "spreadsheet_id_value" - assert response.row_count == 992 + assert isinstance(response, pagers.ListAudienceListsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_sheet_export_audience_list_rest_interceptors(null_interceptor): +def test_list_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -10900,23 +11415,21 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "post_sheet_export_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_sheet_export_audience_list_with_metadata", + "post_list_audience_lists_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_sheet_export_audience_list", + transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.SheetExportAudienceListRequest.pb( - analytics_data_api.SheetExportAudienceListRequest() + pb_message = analytics_data_api.ListAudienceListsRequest.pb( + analytics_data_api.ListAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -10928,24 +11441,24 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): req.return_value = mock.Mock() 
req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.SheetExportAudienceListResponse.to_json( - analytics_data_api.SheetExportAudienceListResponse() + return_value = analytics_data_api.ListAudienceListsResponse.to_json( + analytics_data_api.ListAudienceListsResponse() ) req.return_value.content = return_value - request = analytics_data_api.SheetExportAudienceListRequest() + request = analytics_data_api.ListAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.SheetExportAudienceListResponse() + post.return_value = analytics_data_api.ListAudienceListsResponse() post_with_metadata.return_value = ( - analytics_data_api.SheetExportAudienceListResponse(), + analytics_data_api.ListAudienceListsResponse(), metadata, ) - client.sheet_export_audience_list( + client.list_audience_lists( request, metadata=[ ("key", "val"), @@ -10958,14 +11471,14 @@ def test_sheet_export_audience_list_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_audience_list_rest_bad_request( - request_type=analytics_data_api.GetAudienceListRequest, +def test_create_recurring_audience_list_rest_bad_request( + request_type=analytics_data_api.CreateRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -10981,38 +11494,117 @@ def test_get_audience_list_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_audience_list(request) + client.create_recurring_audience_list(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetAudienceListRequest, + analytics_data_api.CreateRecurringAudienceListRequest, dict, ], ) -def test_get_audience_list_rest_call_success(request_type): +def test_create_recurring_audience_list_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/audienceLists/sample2"} + request_init = {"parent": "properties/sample1"} + request_init["recurring_audience_list"] = { + "name": "name_value", + "audience": "audience_value", + "audience_display_name": "audience_display_name_value", + "dimensions": [{"dimension_name": "dimension_name_value"}], + "active_days_remaining": 2213, + "audience_lists": ["audience_lists_value1", "audience_lists_value2"], + "webhook_notification": { + "uri": "uri_value", + "channel_token": "channel_token_value", + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ + "recurring_audience_list" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "recurring_audience_list" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["recurring_audience_list"][field])): + del 
request_init["recurring_audience_list"][field][i][subfield] + else: + del request_init["recurring_audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.AudienceList( + return_value = analytics_data_api.RecurringAudienceList( name="name_value", audience="audience_value", audience_display_name="audience_display_name_value", - state=analytics_data_api.AudienceList.State.CREATING, - creation_quota_tokens_charged=3070, - row_count=992, - error_message="error_message_value", - percentage_completed=0.2106, - recurring_audience_list="recurring_audience_list_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], ) # Wrap the value into a proper Response obj @@ -11020,28 +11612,24 @@ def test_get_audience_list_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.AudienceList.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_audience_list(request) + response = client.create_recurring_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.AudienceList) + assert isinstance(response, analytics_data_api.RecurringAudienceList) assert response.name == "name_value" assert response.audience == "audience_value" assert response.audience_display_name == "audience_display_name_value" - assert response.state == analytics_data_api.AudienceList.State.CREATING - assert response.creation_quota_tokens_charged == 3070 - assert response.row_count == 992 - assert response.error_message == "error_message_value" - assert math.isclose(response.percentage_completed, 0.2106, rel_tol=1e-6) - assert response.recurring_audience_list == "recurring_audience_list_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_audience_list_rest_interceptors(null_interceptor): +def test_create_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11054,21 +11642,23 @@ def test_get_audience_list_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "post_create_recurring_audience_list", ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_get_audience_list_with_metadata", + "post_create_recurring_audience_list_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_audience_list" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_create_recurring_audience_list", ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = 
analytics_data_api.GetAudienceListRequest.pb( - analytics_data_api.GetAudienceListRequest() + pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( + analytics_data_api.CreateRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -11080,21 +11670,24 @@ def test_get_audience_list_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.AudienceList.to_json( - analytics_data_api.AudienceList() + return_value = analytics_data_api.RecurringAudienceList.to_json( + analytics_data_api.RecurringAudienceList() ) req.return_value.content = return_value - request = analytics_data_api.GetAudienceListRequest() + request = analytics_data_api.CreateRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.AudienceList() - post_with_metadata.return_value = analytics_data_api.AudienceList(), metadata + post.return_value = analytics_data_api.RecurringAudienceList() + post_with_metadata.return_value = ( + analytics_data_api.RecurringAudienceList(), + metadata, + ) - client.get_audience_list( + client.create_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -11107,14 +11700,14 @@ def test_get_audience_list_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_audience_lists_rest_bad_request( - request_type=analytics_data_api.ListAudienceListsRequest, +def test_get_recurring_audience_list_rest_bad_request( + request_type=analytics_data_api.GetRecurringAudienceListRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": 
"properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11130,30 +11723,34 @@ def test_list_audience_lists_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_audience_lists(request) + client.get_recurring_audience_list(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListAudienceListsRequest, + analytics_data_api.GetRecurringAudienceListRequest, dict, ], ) -def test_list_audience_lists_rest_call_success(request_type): +def test_get_recurring_audience_list_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.ListAudienceListsResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.RecurringAudienceList( + name="name_value", + audience="audience_value", + audience_display_name="audience_display_name_value", + active_days_remaining=2213, + audience_lists=["audience_lists_value"], ) # Wrap the value into a proper Response obj @@ -11161,20 +11758,24 @@ def test_list_audience_lists_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListAudienceListsResponse.pb(return_value) + return_value = analytics_data_api.RecurringAudienceList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_audience_lists(request) + response = client.get_recurring_audience_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert response.name == "name_value" + assert response.audience == "audience_value" + assert response.audience_display_name == "audience_display_name_value" + assert response.active_days_remaining == 2213 + assert response.audience_lists == ["audience_lists_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_audience_lists_rest_interceptors(null_interceptor): +def test_get_recurring_audience_list_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11187,21 +11788,23 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "post_get_recurring_audience_list", ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_list_audience_lists_with_metadata", + "post_get_recurring_audience_list_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_list_audience_lists" + transports.AlphaAnalyticsDataRestInterceptor, + "pre_get_recurring_audience_list", ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.ListAudienceListsRequest.pb( - analytics_data_api.ListAudienceListsRequest() + pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( + analytics_data_api.GetRecurringAudienceListRequest() ) transcode.return_value = { "method": "post", @@ -11213,24 +11816,24 @@ def 
test_list_audience_lists_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.ListAudienceListsResponse.to_json( - analytics_data_api.ListAudienceListsResponse() + return_value = analytics_data_api.RecurringAudienceList.to_json( + analytics_data_api.RecurringAudienceList() ) req.return_value.content = return_value - request = analytics_data_api.ListAudienceListsRequest() + request = analytics_data_api.GetRecurringAudienceListRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListAudienceListsResponse() + post.return_value = analytics_data_api.RecurringAudienceList() post_with_metadata.return_value = ( - analytics_data_api.ListAudienceListsResponse(), + analytics_data_api.RecurringAudienceList(), metadata, ) - client.list_audience_lists( + client.get_recurring_audience_list( request, metadata=[ ("key", "val"), @@ -11243,8 +11846,8 @@ def test_list_audience_lists_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_recurring_audience_list_rest_bad_request( - request_type=analytics_data_api.CreateRecurringAudienceListRequest, +def test_list_recurring_audience_lists_rest_bad_request( + request_type=analytics_data_api.ListRecurringAudienceListsRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11266,117 +11869,30 @@ def test_create_recurring_audience_list_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_recurring_audience_list(request) + client.list_recurring_audience_lists(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.CreateRecurringAudienceListRequest, + 
analytics_data_api.ListRecurringAudienceListsRequest, dict, ], ) -def test_create_recurring_audience_list_rest_call_success(request_type): +def test_list_recurring_audience_lists_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} - request_init["recurring_audience_list"] = { - "name": "name_value", - "audience": "audience_value", - "audience_display_name": "audience_display_name_value", - "dimensions": [{"dimension_name": "dimension_name_value"}], - "active_days_remaining": 2213, - "audience_lists": ["audience_lists_value1", "audience_lists_value2"], - "webhook_notification": { - "uri": "uri_value", - "channel_token": "channel_token_value", - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateRecurringAudienceListRequest.meta.fields[ - "recurring_audience_list" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "recurring_audience_list" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["recurring_audience_list"][field])): - del 
request_init["recurring_audience_list"][field][i][subfield] - else: - del request_init["recurring_audience_list"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.RecurringAudienceList( - name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], + return_value = analytics_data_api.ListRecurringAudienceListsResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -11384,24 +11900,22 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( + return_value + ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_recurring_audience_list(request) + response = client.list_recurring_audience_lists(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RecurringAudienceList) - assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] + assert isinstance(response, pagers.ListRecurringAudienceListsPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_recurring_audience_list_rest_interceptors(null_interceptor): +def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11415,22 +11929,22 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_create_recurring_audience_list", + "post_list_recurring_audience_lists", ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_create_recurring_audience_list_with_metadata", + "post_list_recurring_audience_lists_with_metadata", ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "pre_create_recurring_audience_list", + "pre_list_recurring_audience_lists", ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.CreateRecurringAudienceListRequest.pb( - analytics_data_api.CreateRecurringAudienceListRequest() + pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( + analytics_data_api.ListRecurringAudienceListsRequest() ) transcode.return_value = { "method": "post", @@ -11442,24 +11956,24 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): 
req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.RecurringAudienceList.to_json( - analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.ListRecurringAudienceListsResponse.to_json( + analytics_data_api.ListRecurringAudienceListsResponse() ) req.return_value.content = return_value - request = analytics_data_api.CreateRecurringAudienceListRequest() + request = analytics_data_api.ListRecurringAudienceListsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RecurringAudienceList() + post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() post_with_metadata.return_value = ( - analytics_data_api.RecurringAudienceList(), + analytics_data_api.ListRecurringAudienceListsResponse(), metadata, ) - client.create_recurring_audience_list( + client.list_recurring_audience_lists( request, metadata=[ ("key", "val"), @@ -11472,14 +11986,14 @@ def test_create_recurring_audience_list_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_recurring_audience_list_rest_bad_request( - request_type=analytics_data_api.GetRecurringAudienceListRequest, +def test_get_property_quotas_snapshot_rest_bad_request( + request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -11495,34 +12009,30 @@ def test_get_recurring_audience_list_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_recurring_audience_list(request) + client.get_property_quotas_snapshot(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetRecurringAudienceListRequest, + analytics_data_api.GetPropertyQuotasSnapshotRequest, dict, ], ) -def test_get_recurring_audience_list_rest_call_success(request_type): +def test_get_property_quotas_snapshot_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/recurringAudienceLists/sample2"} + request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.RecurringAudienceList( + return_value = analytics_data_api.PropertyQuotasSnapshot( name="name_value", - audience="audience_value", - audience_display_name="audience_display_name_value", - active_days_remaining=2213, - audience_lists=["audience_lists_value"], ) # Wrap the value into a proper Response obj @@ -11530,24 +12040,20 @@ def test_get_recurring_audience_list_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.RecurringAudienceList.pb(return_value) + return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_recurring_audience_list(request) + response = client.get_property_quotas_snapshot(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.RecurringAudienceList) + assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) assert response.name == "name_value" - assert response.audience == "audience_value" - assert response.audience_display_name == "audience_display_name_value" - assert response.active_days_remaining == 2213 - assert response.audience_lists == ["audience_lists_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_recurring_audience_list_rest_interceptors(null_interceptor): +def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11561,22 +12067,22 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_get_recurring_audience_list", + "post_get_property_quotas_snapshot", ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_get_recurring_audience_list_with_metadata", + "post_get_property_quotas_snapshot_with_metadata", ) as post_with_metadata, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "pre_get_recurring_audience_list", + "pre_get_property_quotas_snapshot", ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.GetRecurringAudienceListRequest.pb( - analytics_data_api.GetRecurringAudienceListRequest() + pb_message = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb( + analytics_data_api.GetPropertyQuotasSnapshotRequest() ) transcode.return_value = { "method": "post", @@ -11588,24 +12094,24 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": 
"value-1", "header-2": "value-2"} - return_value = analytics_data_api.RecurringAudienceList.to_json( - analytics_data_api.RecurringAudienceList() + return_value = analytics_data_api.PropertyQuotasSnapshot.to_json( + analytics_data_api.PropertyQuotasSnapshot() ) req.return_value.content = return_value - request = analytics_data_api.GetRecurringAudienceListRequest() + request = analytics_data_api.GetPropertyQuotasSnapshotRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.RecurringAudienceList() + post.return_value = analytics_data_api.PropertyQuotasSnapshot() post_with_metadata.return_value = ( - analytics_data_api.RecurringAudienceList(), + analytics_data_api.PropertyQuotasSnapshot(), metadata, ) - client.get_recurring_audience_list( + client.get_property_quotas_snapshot( request, metadata=[ ("key", "val"), @@ -11618,8 +12124,8 @@ def test_get_recurring_audience_list_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_recurring_audience_lists_rest_bad_request( - request_type=analytics_data_api.ListRecurringAudienceListsRequest, +def test_create_report_task_rest_bad_request( + request_type=analytics_data_api.CreateReportTaskRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11641,53 +12147,213 @@ def test_list_recurring_audience_lists_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_recurring_audience_lists(request) + client.create_report_task(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListRecurringAudienceListsRequest, + analytics_data_api.CreateReportTaskRequest, dict, ], ) -def test_list_recurring_audience_lists_rest_call_success(request_type): +def test_create_report_task_rest_call_success(request_type): 
client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = {"parent": "properties/sample1"} + request_init["report_task"] = { + "name": "name_value", + "report_definition": { + "dimensions": [ + { + "name": "name_value", + "dimension_expression": { + "lower_case": {"dimension_name": "dimension_name_value"}, + "upper_case": {}, + "concatenate": { + "dimension_names": [ + "dimension_names_value1", + "dimension_names_value2", + ], + "delimiter": "delimiter_value", + }, + }, + } + ], + "metrics": [ + { + "name": "name_value", + "expression": "expression_value", + "invisible": True, + } + ], + "date_ranges": [ + { + "start_date": "start_date_value", + "end_date": "end_date_value", + "name": "name_value", + } + ], + "dimension_filter": { + "and_group": {"expressions": {}}, + "or_group": {}, + "not_expression": {}, + "filter": { + "field_name": "field_name_value", + "string_filter": { + "match_type": 1, + "value": "value_value", + "case_sensitive": True, + }, + "in_list_filter": { + "values": ["values_value1", "values_value2"], + "case_sensitive": True, + }, + "numeric_filter": { + "operation": 1, + "value": { + "int64_value": 1073, + "double_value": 0.12710000000000002, + }, + }, + "between_filter": {"from_value": {}, "to_value": {}}, + "empty_filter": {}, + }, + }, + "metric_filter": {}, + "offset": 647, + "limit": 543, + "metric_aggregations": [1], + "order_bys": [ + { + "metric": {"metric_name": "metric_name_value"}, + "dimension": { + "dimension_name": "dimension_name_value", + "order_type": 1, + }, + "desc": True, + } + ], + "currency_code": "currency_code_value", + "cohort_spec": { + "cohorts": [ + { + "name": "name_value", + "dimension": "dimension_value", + "date_range": {}, + } + ], + "cohorts_range": { + "granularity": 1, + "start_offset": 1300, + "end_offset": 1053, + }, + "cohort_report_settings": {"accumulate": True}, + }, + "keep_empty_rows": True, + 
"sampling_level": 1, + }, + "report_metadata": { + "state": 1, + "begin_creating_time": {"seconds": 751, "nanos": 543}, + "creation_quota_tokens_charged": 3070, + "task_row_count": 1522, + "error_message": "error_message_value", + "total_row_count": 1635, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = analytics_data_api.CreateReportTaskRequest.meta.fields["report_task"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["report_task"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if 
isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["report_task"][field])): + del request_init["report_task"][field][i][subfield] + else: + del request_init["report_task"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListRecurringAudienceListsResponse( - next_page_token="next_page_token_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = analytics_data_api.ListRecurringAudienceListsResponse.pb( - return_value - ) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_recurring_audience_lists(request) + response = client.create_report_task(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRecurringAudienceListsPager) - assert response.next_page_token == "next_page_token_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): +def test_create_report_task_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11699,24 +12365,23 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "post_list_recurring_audience_lists", + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AlphaAnalyticsDataRestInterceptor, "post_create_report_task" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_list_recurring_audience_lists_with_metadata", + "post_create_report_task_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_list_recurring_audience_lists", + transports.AlphaAnalyticsDataRestInterceptor, "pre_create_report_task" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.ListRecurringAudienceListsRequest.pb( - analytics_data_api.ListRecurringAudienceListsRequest() + pb_message = analytics_data_api.CreateReportTaskRequest.pb( + analytics_data_api.CreateReportTaskRequest() ) transcode.return_value = { "method": "post", @@ -11728,24 +12393,19 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = 
{"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.ListRecurringAudienceListsResponse.to_json( - analytics_data_api.ListRecurringAudienceListsResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = analytics_data_api.ListRecurringAudienceListsRequest() + request = analytics_data_api.CreateReportTaskRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListRecurringAudienceListsResponse() - post_with_metadata.return_value = ( - analytics_data_api.ListRecurringAudienceListsResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_recurring_audience_lists( + client.create_report_task( request, metadata=[ ("key", "val"), @@ -11758,14 +12418,14 @@ def test_list_recurring_audience_lists_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_property_quotas_snapshot_rest_bad_request( - request_type=analytics_data_api.GetPropertyQuotasSnapshotRequest, +def test_query_report_task_rest_bad_request( + request_type=analytics_data_api.QueryReportTaskRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} + request_init = {"name": "properties/sample1/reportTasks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -11781,30 +12441,30 @@ def test_get_property_quotas_snapshot_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_property_quotas_snapshot(request) + client.query_report_task(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetPropertyQuotasSnapshotRequest, + analytics_data_api.QueryReportTaskRequest, dict, ], ) -def test_get_property_quotas_snapshot_rest_call_success(request_type): +def test_query_report_task_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/propertyQuotasSnapshot"} + request_init = {"name": "properties/sample1/reportTasks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = analytics_data_api.PropertyQuotasSnapshot( - name="name_value", + return_value = analytics_data_api.QueryReportTaskResponse( + row_count=992, ) # Wrap the value into a proper Response obj @@ -11812,20 +12472,20 @@ def test_get_property_quotas_snapshot_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.PropertyQuotasSnapshot.pb(return_value) + return_value = analytics_data_api.QueryReportTaskResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_property_quotas_snapshot(request) + response = client.query_report_task(request) # Establish that the response is the type that we expect. - assert isinstance(response, analytics_data_api.PropertyQuotasSnapshot) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.QueryReportTaskResponse) + assert response.row_count == 992 @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): +def test_query_report_task_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -11838,23 +12498,21 @@ def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "post_get_property_quotas_snapshot", + transports.AlphaAnalyticsDataRestInterceptor, "post_query_report_task" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - 
"post_get_property_quotas_snapshot_with_metadata", + "post_query_report_task_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, - "pre_get_property_quotas_snapshot", + transports.AlphaAnalyticsDataRestInterceptor, "pre_query_report_task" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.GetPropertyQuotasSnapshotRequest.pb( - analytics_data_api.GetPropertyQuotasSnapshotRequest() + pb_message = analytics_data_api.QueryReportTaskRequest.pb( + analytics_data_api.QueryReportTaskRequest() ) transcode.return_value = { "method": "post", @@ -11866,24 +12524,24 @@ def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.PropertyQuotasSnapshot.to_json( - analytics_data_api.PropertyQuotasSnapshot() + return_value = analytics_data_api.QueryReportTaskResponse.to_json( + analytics_data_api.QueryReportTaskResponse() ) req.return_value.content = return_value - request = analytics_data_api.GetPropertyQuotasSnapshotRequest() + request = analytics_data_api.QueryReportTaskRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.PropertyQuotasSnapshot() + post.return_value = analytics_data_api.QueryReportTaskResponse() post_with_metadata.return_value = ( - analytics_data_api.PropertyQuotasSnapshot(), + analytics_data_api.QueryReportTaskResponse(), metadata, ) - client.get_property_quotas_snapshot( + client.query_report_task( request, metadata=[ ("key", "val"), @@ -11896,14 +12554,14 @@ def test_get_property_quotas_snapshot_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_report_task_rest_bad_request( - 
request_type=analytics_data_api.CreateReportTaskRequest, +def test_get_report_task_rest_bad_request( + request_type=analytics_data_api.GetReportTaskRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/reportTasks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -11919,213 +12577,51 @@ def test_create_report_task_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_report_task(request) - - -@pytest.mark.parametrize( - "request_type", - [ - analytics_data_api.CreateReportTaskRequest, - dict, - ], -) -def test_create_report_task_rest_call_success(request_type): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} - request_init["report_task"] = { - "name": "name_value", - "report_definition": { - "dimensions": [ - { - "name": "name_value", - "dimension_expression": { - "lower_case": {"dimension_name": "dimension_name_value"}, - "upper_case": {}, - "concatenate": { - "dimension_names": [ - "dimension_names_value1", - "dimension_names_value2", - ], - "delimiter": "delimiter_value", - }, - }, - } - ], - "metrics": [ - { - "name": "name_value", - "expression": "expression_value", - "invisible": True, - } - ], - "date_ranges": [ - { - "start_date": "start_date_value", - "end_date": "end_date_value", - "name": "name_value", - } - ], - "dimension_filter": { - "and_group": {"expressions": {}}, - "or_group": {}, - "not_expression": {}, - "filter": { - "field_name": "field_name_value", - "string_filter": { - 
"match_type": 1, - "value": "value_value", - "case_sensitive": True, - }, - "in_list_filter": { - "values": ["values_value1", "values_value2"], - "case_sensitive": True, - }, - "numeric_filter": { - "operation": 1, - "value": { - "int64_value": 1073, - "double_value": 0.12710000000000002, - }, - }, - "between_filter": {"from_value": {}, "to_value": {}}, - "empty_filter": {}, - }, - }, - "metric_filter": {}, - "offset": 647, - "limit": 543, - "metric_aggregations": [1], - "order_bys": [ - { - "metric": {"metric_name": "metric_name_value"}, - "dimension": { - "dimension_name": "dimension_name_value", - "order_type": 1, - }, - "desc": True, - } - ], - "currency_code": "currency_code_value", - "cohort_spec": { - "cohorts": [ - { - "name": "name_value", - "dimension": "dimension_value", - "date_range": {}, - } - ], - "cohorts_range": { - "granularity": 1, - "start_offset": 1300, - "end_offset": 1053, - }, - "cohort_report_settings": {"accumulate": True}, - }, - "keep_empty_rows": True, - "sampling_level": 1, - }, - "report_metadata": { - "state": 1, - "begin_creating_time": {"seconds": 751, "nanos": 543}, - "creation_quota_tokens_charged": 3070, - "task_row_count": 1522, - "error_message": "error_message_value", - "total_row_count": 1635, - }, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = analytics_data_api.CreateReportTaskRequest.meta.fields["report_task"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["report_task"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + client.get_report_task(request) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["report_task"][field])): - del 
request_init["report_task"][field][i][subfield] - else: - del request_init["report_task"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + analytics_data_api.GetReportTaskRequest, + dict, + ], +) +def test_get_report_task_rest_call_success(request_type): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/reportTasks/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = analytics_data_api.ReportTask( + name="name_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = analytics_data_api.ReportTask.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_report_task(request) + response = client.get_report_task(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, analytics_data_api.ReportTask) + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_report_task_rest_interceptors(null_interceptor): +def test_get_report_task_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12137,23 +12633,22 @@ def test_create_report_task_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_create_report_task" + transports.AlphaAnalyticsDataRestInterceptor, "post_get_report_task" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_create_report_task_with_metadata", + "post_get_report_task_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_create_report_task" + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_report_task" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.CreateReportTaskRequest.pb( - analytics_data_api.CreateReportTaskRequest() + pb_message = analytics_data_api.GetReportTaskRequest.pb( + analytics_data_api.GetReportTaskRequest() ) transcode.return_value = { "method": "post", @@ -12165,19 +12660,21 @@ def test_create_report_task_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = 
analytics_data_api.ReportTask.to_json( + analytics_data_api.ReportTask() + ) req.return_value.content = return_value - request = analytics_data_api.CreateReportTaskRequest() + request = analytics_data_api.GetReportTaskRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = analytics_data_api.ReportTask() + post_with_metadata.return_value = analytics_data_api.ReportTask(), metadata - client.create_report_task( + client.get_report_task( request, metadata=[ ("key", "val"), @@ -12190,14 +12687,14 @@ def test_create_report_task_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_query_report_task_rest_bad_request( - request_type=analytics_data_api.QueryReportTaskRequest, +def test_list_report_tasks_rest_bad_request( + request_type=analytics_data_api.ListReportTasksRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/reportTasks/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -12213,30 +12710,30 @@ def test_query_report_task_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.query_report_task(request) + client.list_report_tasks(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.QueryReportTaskRequest, + analytics_data_api.ListReportTasksRequest, dict, ], ) -def test_query_report_task_rest_call_success(request_type): +def test_list_report_tasks_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/reportTasks/sample2"} + request_init = {"parent": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.QueryReportTaskResponse( - row_count=992, + return_value = analytics_data_api.ListReportTasksResponse( + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -12244,20 +12741,20 @@ def test_query_report_task_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.QueryReportTaskResponse.pb(return_value) + return_value = analytics_data_api.ListReportTasksResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.query_report_task(request) + response = client.list_report_tasks(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.QueryReportTaskResponse) - assert response.row_count == 992 + assert isinstance(response, pagers.ListReportTasksPager) + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_query_report_task_rest_interceptors(null_interceptor): +def test_list_report_tasks_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12270,21 +12767,21 @@ def test_query_report_task_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_query_report_task" + transports.AlphaAnalyticsDataRestInterceptor, "post_list_report_tasks" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_query_report_task_with_metadata", + "post_list_report_tasks_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_query_report_task" + transports.AlphaAnalyticsDataRestInterceptor, "pre_list_report_tasks" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.QueryReportTaskRequest.pb( - analytics_data_api.QueryReportTaskRequest() + pb_message = analytics_data_api.ListReportTasksRequest.pb( + analytics_data_api.ListReportTasksRequest() ) transcode.return_value = { "method": "post", @@ -12296,24 +12793,24 @@ def test_query_report_task_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.QueryReportTaskResponse.to_json( - analytics_data_api.QueryReportTaskResponse() + return_value 
= analytics_data_api.ListReportTasksResponse.to_json( + analytics_data_api.ListReportTasksResponse() ) req.return_value.content = return_value - request = analytics_data_api.QueryReportTaskRequest() + request = analytics_data_api.ListReportTasksRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.QueryReportTaskResponse() + post.return_value = analytics_data_api.ListReportTasksResponse() post_with_metadata.return_value = ( - analytics_data_api.QueryReportTaskResponse(), + analytics_data_api.ListReportTasksResponse(), metadata, ) - client.query_report_task( + client.list_report_tasks( request, metadata=[ ("key", "val"), @@ -12326,14 +12823,12 @@ def test_query_report_task_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_report_task_rest_bad_request( - request_type=analytics_data_api.GetReportTaskRequest, -): +def test_run_report_rest_bad_request(request_type=analytics_data_api.RunReportRequest): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/reportTasks/sample2"} + request_init = {"property": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -12349,30 +12844,32 @@ def test_get_report_task_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_report_task(request) + client.run_report(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.GetReportTaskRequest, + analytics_data_api.RunReportRequest, dict, ], ) -def test_get_report_task_rest_call_success(request_type): +def test_run_report_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "properties/sample1/reportTasks/sample2"} + request_init = {"property": "properties/sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ReportTask( - name="name_value", + return_value = analytics_data_api.RunReportResponse( + row_count=992, + kind="kind_value", + next_page_token="next_page_token_value", ) # Wrap the value into a proper Response obj @@ -12380,20 +12877,24 @@ def test_get_report_task_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ReportTask.pb(return_value) + return_value = analytics_data_api.RunReportResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_report_task(request) + response = client.run_report(request) + + assert response.raw_page is response # Establish that the response is the type that we expect. 
- assert isinstance(response, analytics_data_api.ReportTask) - assert response.name == "name_value" + assert isinstance(response, analytics_data_api.RunReportResponse) + assert response.row_count == 992 + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_report_task_rest_interceptors(null_interceptor): +def test_run_report_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12406,21 +12907,21 @@ def test_get_report_task_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_get_report_task" + transports.AlphaAnalyticsDataRestInterceptor, "post_run_report" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_get_report_task_with_metadata", + "post_run_report_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_get_report_task" + transports.AlphaAnalyticsDataRestInterceptor, "pre_run_report" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.GetReportTaskRequest.pb( - analytics_data_api.GetReportTaskRequest() + pb_message = analytics_data_api.RunReportRequest.pb( + analytics_data_api.RunReportRequest() ) transcode.return_value = { "method": "post", @@ -12432,21 +12933,24 @@ def test_get_report_task_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.ReportTask.to_json( - analytics_data_api.ReportTask() + return_value = 
analytics_data_api.RunReportResponse.to_json( + analytics_data_api.RunReportResponse() ) req.return_value.content = return_value - request = analytics_data_api.GetReportTaskRequest() + request = analytics_data_api.RunReportRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ReportTask() - post_with_metadata.return_value = analytics_data_api.ReportTask(), metadata + post.return_value = analytics_data_api.RunReportResponse() + post_with_metadata.return_value = ( + analytics_data_api.RunReportResponse(), + metadata, + ) - client.get_report_task( + client.run_report( request, metadata=[ ("key", "val"), @@ -12459,14 +12963,14 @@ def test_get_report_task_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_report_tasks_rest_bad_request( - request_type=analytics_data_api.ListReportTasksRequest, +def test_get_metadata_rest_bad_request( + request_type=analytics_data_api.GetMetadataRequest, ): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/metadata"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -12482,30 +12986,30 @@ def test_list_report_tasks_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_report_tasks(request) + client.get_metadata(request) @pytest.mark.parametrize( "request_type", [ - analytics_data_api.ListReportTasksRequest, + analytics_data_api.GetMetadataRequest, dict, ], ) -def test_list_report_tasks_rest_call_success(request_type): +def test_get_metadata_rest_call_success(request_type): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "properties/sample1"} + request_init = {"name": "properties/sample1/metadata"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = analytics_data_api.ListReportTasksResponse( - next_page_token="next_page_token_value", + return_value = analytics_data_api.Metadata( + name="name_value", ) # Wrap the value into a proper Response obj @@ -12513,20 +13017,20 @@ def test_list_report_tasks_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = analytics_data_api.ListReportTasksResponse.pb(return_value) + return_value = analytics_data_api.Metadata.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_report_tasks(request) + response = client.get_metadata(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListReportTasksPager) - assert response.next_page_token == "next_page_token_value" + assert isinstance(response, analytics_data_api.Metadata) + assert response.name == "name_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_report_tasks_rest_interceptors(null_interceptor): +def test_get_metadata_rest_interceptors(null_interceptor): transport = transports.AlphaAnalyticsDataRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -12539,21 +13043,21 @@ def test_list_report_tasks_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "post_list_report_tasks" + transports.AlphaAnalyticsDataRestInterceptor, "post_get_metadata" ) as post, mock.patch.object( transports.AlphaAnalyticsDataRestInterceptor, - "post_list_report_tasks_with_metadata", + "post_get_metadata_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.AlphaAnalyticsDataRestInterceptor, "pre_list_report_tasks" + transports.AlphaAnalyticsDataRestInterceptor, "pre_get_metadata" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = analytics_data_api.ListReportTasksRequest.pb( - analytics_data_api.ListReportTasksRequest() + pb_message = analytics_data_api.GetMetadataRequest.pb( + analytics_data_api.GetMetadataRequest() ) transcode.return_value = { "method": "post", @@ -12565,24 +13069,21 @@ def test_list_report_tasks_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = analytics_data_api.ListReportTasksResponse.to_json( - analytics_data_api.ListReportTasksResponse() + return_value = analytics_data_api.Metadata.to_json( 
+ analytics_data_api.Metadata() ) req.return_value.content = return_value - request = analytics_data_api.ListReportTasksRequest() + request = analytics_data_api.GetMetadataRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = analytics_data_api.ListReportTasksResponse() - post_with_metadata.return_value = ( - analytics_data_api.ListReportTasksResponse(), - metadata, - ) + post.return_value = analytics_data_api.Metadata() + post_with_metadata.return_value = analytics_data_api.Metadata(), metadata - client.list_report_tasks( + client.get_metadata( request, metadata=[ ("key", "val"), @@ -12668,28 +13169,6 @@ def test_query_audience_list_empty_call_rest(): assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_sheet_export_audience_list_empty_call_rest(): - client = AlphaAnalyticsDataClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.sheet_export_audience_list), "__call__" - ) as call: - client.sheet_export_audience_list(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = analytics_data_api.SheetExportAudienceListRequest() - - assert args[0] == request_msg - - # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_get_audience_list_empty_call_rest(): @@ -12908,6 +13387,46 @@ def test_list_report_tasks_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_run_report_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.run_report), "__call__") as call: + client.run_report(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.RunReportRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_metadata_empty_call_rest(): + client = AlphaAnalyticsDataClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_metadata), "__call__") as call: + client.get_metadata(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_data_api.GetMetadataRequest() + + assert args[0] == request_msg + + def test_alpha_analytics_data_rest_lro_client(): client = AlphaAnalyticsDataClient( credentials=ga_credentials.AnonymousCredentials(), @@ -12961,7 +13480,6 @@ def test_alpha_analytics_data_base_transport(): "run_funnel_report", "create_audience_list", "query_audience_list", - "sheet_export_audience_list", "get_audience_list", "list_audience_lists", "create_recurring_audience_list", @@ -12972,6 +13490,8 @@ def test_alpha_analytics_data_base_transport(): "query_report_task", "get_report_task", "list_report_tasks", + "run_report", + "get_metadata", ) for method in methods: with pytest.raises(NotImplementedError): @@ -13016,9 +13536,6 @@ def test_alpha_analytics_data_base_transport_with_credentials_file(): default_scopes=( "https://www.googleapis.com/auth/analytics", "https://www.googleapis.com/auth/analytics.readonly", - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/drive.file", - "https://www.googleapis.com/auth/spreadsheets", ), quota_project_id="octopus", ) @@ -13048,9 +13565,6 @@ def test_alpha_analytics_data_auth_adc(): default_scopes=( "https://www.googleapis.com/auth/analytics", "https://www.googleapis.com/auth/analytics.readonly", - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/drive.file", - "https://www.googleapis.com/auth/spreadsheets", ), quota_project_id=None, ) @@ -13074,9 +13588,6 @@ def test_alpha_analytics_data_transport_auth_adc(transport_class): default_scopes=( "https://www.googleapis.com/auth/analytics", "https://www.googleapis.com/auth/analytics.readonly", - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/drive.file", - "https://www.googleapis.com/auth/spreadsheets", ), quota_project_id="octopus", ) @@ -13133,9 +13644,6 @@ def test_alpha_analytics_data_transport_create_channel(transport_class, grpc_hel 
default_scopes=( "https://www.googleapis.com/auth/analytics", "https://www.googleapis.com/auth/analytics.readonly", - "https://www.googleapis.com/auth/drive", - "https://www.googleapis.com/auth/drive.file", - "https://www.googleapis.com/auth/spreadsheets", ), scopes=["1", "2"], default_host="analyticsdata.googleapis.com", @@ -13277,9 +13785,6 @@ def test_alpha_analytics_data_client_transport_session_collision(transport_name) session1 = client1.transport.query_audience_list._session session2 = client2.transport.query_audience_list._session assert session1 != session2 - session1 = client1.transport.sheet_export_audience_list._session - session2 = client2.transport.sheet_export_audience_list._session - assert session1 != session2 session1 = client1.transport.get_audience_list._session session2 = client2.transport.get_audience_list._session assert session1 != session2 @@ -13310,6 +13815,12 @@ def test_alpha_analytics_data_client_transport_session_collision(transport_name) session1 = client1.transport.list_report_tasks._session session2 = client2.transport.list_report_tasks._session assert session1 != session2 + session1 = client1.transport.run_report._session + session2 = client2.transport.run_report._session + assert session1 != session2 + session1 = client1.transport.get_metadata._session + session2 = client2.transport.get_metadata._session + assert session1 != session2 def test_alpha_analytics_data_grpc_transport_channel(): @@ -13496,8 +14007,28 @@ def test_parse_audience_list_path(): assert expected == actual -def test_property_quotas_snapshot_path(): +def test_metadata_path(): property = "oyster" + expected = "properties/{property}/metadata".format( + property=property, + ) + actual = AlphaAnalyticsDataClient.metadata_path(property) + assert expected == actual + + +def test_parse_metadata_path(): + expected = { + "property": "nudibranch", + } + path = AlphaAnalyticsDataClient.metadata_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AlphaAnalyticsDataClient.parse_metadata_path(path) + assert expected == actual + + +def test_property_quotas_snapshot_path(): + property = "cuttlefish" expected = "properties/{property}/propertyQuotasSnapshot".format( property=property, ) @@ -13507,7 +14038,7 @@ def test_property_quotas_snapshot_path(): def test_parse_property_quotas_snapshot_path(): expected = { - "property": "nudibranch", + "property": "mussel", } path = AlphaAnalyticsDataClient.property_quotas_snapshot_path(**expected) @@ -13517,8 +14048,8 @@ def test_parse_property_quotas_snapshot_path(): def test_recurring_audience_list_path(): - property = "cuttlefish" - recurring_audience_list = "mussel" + property = "winkle" + recurring_audience_list = "nautilus" expected = ( "properties/{property}/recurringAudienceLists/{recurring_audience_list}".format( property=property, @@ -13533,8 +14064,8 @@ def test_recurring_audience_list_path(): def test_parse_recurring_audience_list_path(): expected = { - "property": "winkle", - "recurring_audience_list": "nautilus", + "property": "scallop", + "recurring_audience_list": "abalone", } path = AlphaAnalyticsDataClient.recurring_audience_list_path(**expected) @@ -13544,8 +14075,8 @@ def test_parse_recurring_audience_list_path(): def test_report_task_path(): - property = "scallop" - report_task = "abalone" + property = "squid" + report_task = "clam" expected = "properties/{property}/reportTasks/{report_task}".format( property=property, report_task=report_task, @@ -13556,8 +14087,8 @@ def test_report_task_path(): def test_parse_report_task_path(): expected = { - "property": "squid", - "report_task": "clam", + "property": "whelk", + "report_task": "octopus", } path = AlphaAnalyticsDataClient.report_task_path(**expected) @@ -13567,7 +14098,7 @@ def test_parse_report_task_path(): def test_common_billing_account_path(): - billing_account = "whelk" + billing_account = "oyster" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) 
@@ -13577,7 +14108,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "octopus", + "billing_account": "nudibranch", } path = AlphaAnalyticsDataClient.common_billing_account_path(**expected) @@ -13587,7 +14118,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "oyster" + folder = "cuttlefish" expected = "folders/{folder}".format( folder=folder, ) @@ -13597,7 +14128,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nudibranch", + "folder": "mussel", } path = AlphaAnalyticsDataClient.common_folder_path(**expected) @@ -13607,7 +14138,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "cuttlefish" + organization = "winkle" expected = "organizations/{organization}".format( organization=organization, ) @@ -13617,7 +14148,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "mussel", + "organization": "nautilus", } path = AlphaAnalyticsDataClient.common_organization_path(**expected) @@ -13627,7 +14158,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "winkle" + project = "scallop" expected = "projects/{project}".format( project=project, ) @@ -13637,7 +14168,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "nautilus", + "project": "abalone", } path = AlphaAnalyticsDataClient.common_project_path(**expected) @@ -13647,8 +14178,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "scallop" - location = "abalone" + project = "squid" + location = "clam" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -13659,8 +14190,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "squid", - 
"location": "clam", + "project": "whelk", + "location": "octopus", } path = AlphaAnalyticsDataClient.common_location_path(**expected) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance/__init__.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance/__init__.py index b4c89c91615c..decaea805c27 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance/__init__.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance/__init__.py @@ -77,6 +77,7 @@ CloudControlCategory, CloudControlDetails, CloudProvider, + Control, ControlFamily, EnforcementMode, Framework, @@ -146,6 +147,7 @@ FindingSummary, FrameworkComplianceReport, FrameworkComplianceSummary, + FrameworkComplianceSummaryView, ListControlComplianceSummariesRequest, ListControlComplianceSummariesResponse, ListFindingSummariesRequest, @@ -155,6 +157,7 @@ ManualCloudControlAssessmentDetails, SimilarControls, TargetResourceDetails, + Trend, ) __all__ = ( @@ -194,6 +197,7 @@ "CELExpression", "CloudControl", "CloudControlDetails", + "Control", "ControlFamily", "Framework", "FrameworkReference", @@ -266,6 +270,8 @@ "ManualCloudControlAssessmentDetails", "SimilarControls", "TargetResourceDetails", + "Trend", "EvaluationState", "FindingClass", + "FrameworkComplianceSummaryView", ) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/__init__.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/__init__.py index 740ae08c99e0..3897de7042a8 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/__init__.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/__init__.py @@ -70,6 +70,7 @@ CloudControlCategory, CloudControlDetails, CloudProvider, + Control, ControlFamily, EnforcementMode, Framework, @@ -139,6 +140,7 
@@ FindingSummary, FrameworkComplianceReport, FrameworkComplianceSummary, + FrameworkComplianceSummaryView, ListControlComplianceSummariesRequest, ListControlComplianceSummariesResponse, ListFindingSummariesRequest, @@ -148,6 +150,7 @@ ManualCloudControlAssessmentDetails, SimilarControls, TargetResourceDetails, + Trend, ) if hasattr(api_core, "check_python_version") and hasattr( @@ -276,6 +279,7 @@ def _get_version(dependency_name): "CmEnrollmentServiceClient", "ComplianceState", "ConfigClient", + "Control", "ControlAssessmentDetails", "ControlComplianceSummary", "ControlFamily", @@ -302,6 +306,7 @@ def _get_version(dependency_name): "FrameworkCategory", "FrameworkComplianceReport", "FrameworkComplianceSummary", + "FrameworkComplianceSummaryView", "FrameworkDeployment", "FrameworkDeploymentReference", "FrameworkReference", @@ -351,6 +356,7 @@ def _get_version(dependency_name): "TargetResourceCreationConfig", "TargetResourceDetails", "TargetResourceType", + "Trend", "UpdateCloudControlRequest", "UpdateCmEnrollmentRequest", "UpdateFrameworkRequest", diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/audit/transports/rest.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/audit/transports/rest.py index 046e99ee24ed..0b25167f615b 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/audit/transports/rest.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/audit/transports/rest.py @@ -565,24 +565,41 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ], "google.longrunning.Operations.DeleteOperation": [ { "method": "delete", "uri": 
"/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ], "google.longrunning.Operations.GetOperation": [ { "method": "get", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ], "google.longrunning.Operations.ListOperations": [ { "method": "get", "uri": "/v1/{name=organizations/*/locations/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ], } diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/audit/transports/rest_base.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/audit/transports/rest_base.py index ea6f5cb6db30..b3ca4638dd66 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/audit/transports/rest_base.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/audit/transports/rest_base.py @@ -344,6 +344,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @@ -369,6 +373,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*}/locations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @@ -395,6 +403,11 @@ def _get_http_options(): "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @@ -425,6 +438,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "delete", 
+ "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -450,6 +467,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -475,6 +496,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/cm_enrollment_service/transports/rest_base.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/cm_enrollment_service/transports/rest_base.py index 04b1497d8c9a..fc6601abad79 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/cm_enrollment_service/transports/rest_base.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/cm_enrollment_service/transports/rest_base.py @@ -224,6 +224,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @@ -249,6 +253,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*}/locations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @@ -275,6 +283,11 @@ def _get_http_options(): "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @@ -305,6 +318,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, 
+ { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -330,6 +347,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -355,6 +376,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/async_client.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/async_client.py index c3dcf4645681..b0b0de847e3d 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/async_client.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/async_client.py @@ -337,8 +337,12 @@ async def sample_list_frameworks(): request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.ListFrameworksRequest, dict]]): The request object. Request message for [ListFrameworks][]. parent (:class:`str`): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. + Required. The parent resource name, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + The only supported location is ``global``. This corresponds to the ``parent`` field @@ -466,9 +470,12 @@ async def sample_get_framework(): request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.GetFrameworkRequest, dict]]): The request object. The request message for [GetFramework][]. name (:class:`str`): - Required. 
The name of the framework to retrieve, in the - format - ``organizations/{organization}/locations/{location}/frameworks/{framework_id}`` + Required. The name of the framework to retrieve, in one + of the following formats: + ``organizations/{organization}/locations/{location}/frameworks/{framework}`` + or + ``projects/{project}/locations/{location}/frameworks/{framework}``. + The only supported location is ``global``. This corresponds to the ``name`` field @@ -590,8 +597,12 @@ async def sample_create_framework(): request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.CreateFrameworkRequest, dict]]): The request object. The request message for [CreateFramework][]. parent (:class:`str`): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. + Required. The parent resource name, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + The only supported location is ``global``. This corresponds to the ``parent`` field @@ -870,8 +881,12 @@ async def sample_delete_framework(): request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.DeleteFrameworkRequest, dict]]): The request object. Request message for [DeleteFramework][]. name (:class:`str`): - Required. The name of the resource, in the format - ``organizations/{organization}/locations/{location}/frameworks/{framework}``. + Required. The name of the resource, in one of the + following formats: + ``organizations/{organization}/locations/{location}/frameworks/{framework}`` + or + ``projects/{project}/locations/{location}/frameworks/{framework}``. + The only supported location is ``global``. This corresponds to the ``name`` field @@ -976,8 +991,12 @@ async def sample_list_cloud_controls(): request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.ListCloudControlsRequest, dict]]): The request object. 
Request message for [ListCloudControls][]. parent (:class:`str`): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. + Required. The parent resource name, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + The only supported location is ``global``. This corresponds to the ``parent`` field @@ -1107,8 +1126,11 @@ async def sample_get_cloud_control(): The request object. The request message for [GetCloudControl][]. name (:class:`str`): Required. The name of the cloud control to retrieve, in - the format - ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control}``. + one of the following formats: + ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control}`` + or + ``projects/{project}/locations/{location}/cloudControls/{cloud_control}``. + The only supported location is ``global``. This corresponds to the ``name`` field @@ -1230,8 +1252,12 @@ async def sample_create_cloud_control(): request (Optional[Union[google.cloud.cloudsecuritycompliance_v1.types.CreateCloudControlRequest, dict]]): The request object. The request message for [CreateCloudControl][]. parent (:class:`str`): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. + Required. The parent resource name, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}``. + - ``projects/{project}/locations/{location}``. + The only supported location is ``global``. This corresponds to the ``parent`` field @@ -1518,8 +1544,11 @@ async def sample_delete_cloud_control(): The request object. The request message for [DeleteCloudControl][]. name (:class:`str`): Required. The name of the cloud control to delete, in - the format - ``organizations/{organization}/locations/{location}/CloudControls/{CloudControl}``. 
+ one of the following formats: + ``organizations/{organization}/locations/{location}/CloudControls/{CloudControl}`` + or + ``projects/{project}/locations/{location}/CloudControls/{CloudControl}``. + The only supported location is ``global``. This corresponds to the ``name`` field diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/client.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/client.py index 62bbc083c406..7bb7a84bf6f0 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/client.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/client.py @@ -798,8 +798,12 @@ def sample_list_frameworks(): request (Union[google.cloud.cloudsecuritycompliance_v1.types.ListFrameworksRequest, dict]): The request object. Request message for [ListFrameworks][]. parent (str): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. + Required. The parent resource name, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + The only supported location is ``global``. This corresponds to the ``parent`` field @@ -924,9 +928,12 @@ def sample_get_framework(): request (Union[google.cloud.cloudsecuritycompliance_v1.types.GetFrameworkRequest, dict]): The request object. The request message for [GetFramework][]. name (str): - Required. The name of the framework to retrieve, in the - format - ``organizations/{organization}/locations/{location}/frameworks/{framework_id}`` + Required. The name of the framework to retrieve, in one + of the following formats: + ``organizations/{organization}/locations/{location}/frameworks/{framework}`` + or + ``projects/{project}/locations/{location}/frameworks/{framework}``. 
+ The only supported location is ``global``. This corresponds to the ``name`` field @@ -1045,8 +1052,12 @@ def sample_create_framework(): request (Union[google.cloud.cloudsecuritycompliance_v1.types.CreateFrameworkRequest, dict]): The request object. The request message for [CreateFramework][]. parent (str): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. + Required. The parent resource name, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + The only supported location is ``global``. This corresponds to the ``parent`` field @@ -1319,8 +1330,12 @@ def sample_delete_framework(): request (Union[google.cloud.cloudsecuritycompliance_v1.types.DeleteFrameworkRequest, dict]): The request object. Request message for [DeleteFramework][]. name (str): - Required. The name of the resource, in the format - ``organizations/{organization}/locations/{location}/frameworks/{framework}``. + Required. The name of the resource, in one of the + following formats: + ``organizations/{organization}/locations/{location}/frameworks/{framework}`` + or + ``projects/{project}/locations/{location}/frameworks/{framework}``. + The only supported location is ``global``. This corresponds to the ``name`` field @@ -1422,8 +1437,12 @@ def sample_list_cloud_controls(): request (Union[google.cloud.cloudsecuritycompliance_v1.types.ListCloudControlsRequest, dict]): The request object. Request message for [ListCloudControls][]. parent (str): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. + Required. The parent resource name, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + The only supported location is ``global``. 
This corresponds to the ``parent`` field @@ -1550,8 +1569,11 @@ def sample_get_cloud_control(): The request object. The request message for [GetCloudControl][]. name (str): Required. The name of the cloud control to retrieve, in - the format - ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control}``. + one of the following formats: + ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control}`` + or + ``projects/{project}/locations/{location}/cloudControls/{cloud_control}``. + The only supported location is ``global``. This corresponds to the ``name`` field @@ -1670,8 +1692,12 @@ def sample_create_cloud_control(): request (Union[google.cloud.cloudsecuritycompliance_v1.types.CreateCloudControlRequest, dict]): The request object. The request message for [CreateCloudControl][]. parent (str): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. + Required. The parent resource name, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}``. + - ``projects/{project}/locations/{location}``. + The only supported location is ``global``. This corresponds to the ``parent`` field @@ -1952,8 +1978,11 @@ def sample_delete_cloud_control(): The request object. The request message for [DeleteCloudControl][]. name (str): Required. The name of the cloud control to delete, in - the format - ``organizations/{organization}/locations/{location}/CloudControls/{CloudControl}``. + one of the following formats: + ``organizations/{organization}/locations/{location}/CloudControls/{CloudControl}`` + or + ``projects/{project}/locations/{location}/CloudControls/{CloudControl}``. + The only supported location is ``global``. 
This corresponds to the ``name`` field diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/transports/rest_base.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/transports/rest_base.py index 1d3f9fd61974..ba6a5fc62bf2 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/transports/rest_base.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/config/transports/rest_base.py @@ -114,6 +114,11 @@ def _get_http_options(): "uri": "/v1/{parent=organizations/*/locations/*}/cloudControls", "body": "cloud_control", }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/cloudControls", + "body": "cloud_control", + }, ] return http_options @@ -173,6 +178,11 @@ def _get_http_options(): "uri": "/v1/{parent=organizations/*/locations/*}/frameworks", "body": "framework", }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/frameworks", + "body": "framework", + }, ] return http_options @@ -229,6 +239,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=organizations/*/locations/*/cloudControls/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/cloudControls/*}", + }, ] return http_options @@ -276,6 +290,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=organizations/*/locations/*/frameworks/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/frameworks/*}", + }, ] return http_options @@ -323,6 +341,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*/cloudControls/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudControls/*}", + }, ] return http_options @@ -370,6 +392,10 @@ def _get_http_options(): "method": "get", "uri": 
"/v1/{name=organizations/*/locations/*/frameworks/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/frameworks/*}", + }, ] return http_options @@ -417,6 +443,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{parent=organizations/*/locations/*}/cloudControls", }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudControls", + }, ] return http_options @@ -464,6 +494,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{parent=organizations/*/locations/*}/frameworks", }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/frameworks", + }, ] return http_options @@ -512,6 +546,11 @@ def _get_http_options(): "uri": "/v1/{cloud_control.name=organizations/*/locations/*/cloudControls/*}", "body": "cloud_control", }, + { + "method": "patch", + "uri": "/v1/{cloud_control.name=projects/*/locations/*/cloudControls/*}", + "body": "cloud_control", + }, ] return http_options @@ -569,6 +608,11 @@ def _get_http_options(): "uri": "/v1/{framework.name=organizations/*/locations/*/frameworks/*}", "body": "framework", }, + { + "method": "patch", + "uri": "/v1/{framework.name=projects/*/locations/*/frameworks/*}", + "body": "framework", + }, ] return http_options @@ -615,6 +659,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @@ -640,6 +688,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*}/locations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @@ -666,6 +718,11 @@ def _get_http_options(): "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @@ -696,6 +753,10 @@ def _get_http_options(): "method": "delete", "uri": 
"/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -721,6 +782,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -746,6 +811,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/async_client.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/async_client.py index 55c9c967baf1..181e47d6a3d0 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/async_client.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/async_client.py @@ -90,6 +90,8 @@ class DeploymentAsyncClient: parse_cloud_control_deployment_path = staticmethod( DeploymentClient.parse_cloud_control_deployment_path ) + framework_path = staticmethod(DeploymentClient.framework_path) + parse_framework_path = staticmethod(DeploymentClient.parse_framework_path) framework_deployment_path = staticmethod(DeploymentClient.framework_deployment_path) parse_framework_deployment_path = staticmethod( DeploymentClient.parse_framework_deployment_path @@ -369,8 +371,9 @@ async def sample_create_framework_deployment(): parent (:class:`str`): Required. The parent resource of the framework deployment in the format - ``organizations/{organization}/locations/{location}``. - Only the global location is supported. 
+ ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. Only the + global location is supported. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -519,7 +522,9 @@ async def sample_delete_framework_deployment(): name (:class:`str`): Required. The name of the framework deployment that you want to delete, in the format - ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment_id}``. + ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}`` + or + ``projects/{project}/locations/{location}/frameworkDeployments/{framework_deployment}``. The only supported location is ``global``. This corresponds to the ``name`` field @@ -649,7 +654,9 @@ async def sample_get_framework_deployment(): name (:class:`str`): Required. The name of the framework deployment, in the format - ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment_id}``. + ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}`` + or + ``projects/{project}/locations/{location}/frameworkDeployments/{framework_deployment}``. The only supported location is ``global``. This corresponds to the ``name`` field @@ -767,8 +774,9 @@ async def sample_list_framework_deployments(): parent (:class:`str`): Required. The parent resource of the framework deployment, in the format - ``organizations/{organization}/locations/{location}``. - The only supported location is ``global``. + ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. The only + supported location is ``global``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -894,7 +902,9 @@ async def sample_get_cloud_control_deployment(): name (:class:`str`): Required. 
The name for the cloud control deployment, in the format - ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment_id}``. + ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}`` + or + ``projects/{project}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}``. The only supported location is ``global``. This corresponds to the ``name`` field @@ -1012,8 +1022,9 @@ async def sample_list_cloud_control_deployments(): parent (:class:`str`): Required. The parent resource for the cloud control deployment, in the format - ``organizations/{organization}/locations/{location}``. - The only supported location is ``global``. + ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. The only + supported location is ``global``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/client.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/client.py index c50f7612f07d..c1e6d458db13 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/client.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/client.py @@ -279,6 +279,28 @@ def parse_cloud_control_deployment_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def framework_path( + organization: str, + location: str, + framework: str, + ) -> str: + """Returns a fully-qualified framework string.""" + return "organizations/{organization}/locations/{location}/frameworks/{framework}".format( + organization=organization, + location=location, + framework=framework, + ) + + @staticmethod + def 
parse_framework_path(path: str) -> Dict[str, str]: + """Parses a framework path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/locations/(?P.+?)/frameworks/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def framework_deployment_path( organization: str, @@ -840,8 +862,9 @@ def sample_create_framework_deployment(): parent (str): Required. The parent resource of the framework deployment in the format - ``organizations/{organization}/locations/{location}``. - Only the global location is supported. + ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. Only the + global location is supported. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -989,7 +1012,9 @@ def sample_delete_framework_deployment(): name (str): Required. The name of the framework deployment that you want to delete, in the format - ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment_id}``. + ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}`` + or + ``projects/{project}/locations/{location}/frameworkDeployments/{framework_deployment}``. The only supported location is ``global``. This corresponds to the ``name`` field @@ -1118,7 +1143,9 @@ def sample_get_framework_deployment(): name (str): Required. The name of the framework deployment, in the format - ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment_id}``. + ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}`` + or + ``projects/{project}/locations/{location}/frameworkDeployments/{framework_deployment}``. The only supported location is ``global``. This corresponds to the ``name`` field @@ -1233,8 +1260,9 @@ def sample_list_framework_deployments(): parent (str): Required. 
The parent resource of the framework deployment, in the format - ``organizations/{organization}/locations/{location}``. - The only supported location is ``global``. + ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. The only + supported location is ``global``. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -1359,7 +1387,9 @@ def sample_get_cloud_control_deployment(): name (str): Required. The name for the cloud control deployment, in the format - ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment_id}``. + ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}`` + or + ``projects/{project}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}``. The only supported location is ``global``. This corresponds to the ``name`` field @@ -1476,8 +1506,9 @@ def sample_list_cloud_control_deployments(): parent (str): Required. The parent resource for the cloud control deployment, in the format - ``organizations/{organization}/locations/{location}``. - The only supported location is ``global``. + ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. The only + supported location is ``global``. 
This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/transports/rest.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/transports/rest.py index f314956e91b7..ba0cebd3f570 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/transports/rest.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/transports/rest.py @@ -688,24 +688,41 @@ def operations_client(self) -> operations_v1.AbstractOperationsClient: "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ], "google.longrunning.Operations.DeleteOperation": [ { "method": "delete", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ], "google.longrunning.Operations.GetOperation": [ { "method": "get", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ], "google.longrunning.Operations.ListOperations": [ { "method": "get", "uri": "/v1/{name=organizations/*/locations/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ], } diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/transports/rest_base.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/transports/rest_base.py index 9819a9cdfbe3..b5246e2a44ee 100644 --- 
a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/transports/rest_base.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/deployment/transports/rest_base.py @@ -111,6 +111,11 @@ def _get_http_options(): "uri": "/v1/{parent=organizations/*/locations/*}/frameworkDeployments", "body": "framework_deployment", }, + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/frameworkDeployments", + "body": "framework_deployment", + }, ] return http_options @@ -167,6 +172,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=organizations/*/locations/*/frameworkDeployments/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/frameworkDeployments/*}", + }, ] return http_options @@ -214,6 +223,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*/cloudControlDeployments/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/cloudControlDeployments/*}", + }, ] return http_options @@ -261,6 +274,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*/frameworkDeployments/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/frameworkDeployments/*}", + }, ] return http_options @@ -308,6 +325,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{parent=organizations/*/locations/*}/cloudControlDeployments", }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/cloudControlDeployments", + }, ] return http_options @@ -355,6 +376,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{parent=organizations/*/locations/*}/frameworkDeployments", }, + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/frameworkDeployments", + }, ] return http_options @@ -392,6 +417,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}", }, + { + 
"method": "get", + "uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @@ -417,6 +446,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*}/locations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @@ -443,6 +476,11 @@ def _get_http_options(): "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @@ -473,6 +511,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -498,6 +540,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -523,6 +569,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest_base.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest_base.py index e20fb3e2cb9b..4f2719a2322a 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest_base.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/services/monitoring/transports/rest_base.py @@ -377,6 +377,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}", }, + { + "method": "get", + 
"uri": "/v1/{name=projects/*/locations/*}", + }, ] return http_options @@ -402,6 +406,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*}/locations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*}/locations", + }, ] return http_options @@ -428,6 +436,11 @@ def _get_http_options(): "uri": "/v1/{name=organizations/*/locations/*/operations/*}:cancel", "body": "*", }, + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/operations/*}:cancel", + "body": "*", + }, ] return http_options @@ -458,6 +471,10 @@ def _get_http_options(): "method": "delete", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -483,6 +500,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*/operations/*}", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/operations/*}", + }, ] return http_options @@ -508,6 +529,10 @@ def _get_http_options(): "method": "get", "uri": "/v1/{name=organizations/*/locations/*}/operations", }, + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*}/operations", + }, ] return http_options diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/__init__.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/__init__.py index 52b649a192dc..314e89b149a7 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/__init__.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/__init__.py @@ -46,6 +46,7 @@ CloudControlCategory, CloudControlDetails, CloudProvider, + Control, ControlFamily, EnforcementMode, Framework, @@ -115,6 +116,7 @@ FindingSummary, FrameworkComplianceReport, FrameworkComplianceSummary, + FrameworkComplianceSummaryView, 
ListControlComplianceSummariesRequest, ListControlComplianceSummariesResponse, ListFindingSummariesRequest, @@ -124,6 +126,7 @@ ManualCloudControlAssessmentDetails, SimilarControls, TargetResourceDetails, + Trend, ) __all__ = ( @@ -153,6 +156,7 @@ "CELExpression", "CloudControl", "CloudControlDetails", + "Control", "ControlFamily", "Framework", "FrameworkReference", @@ -225,6 +229,8 @@ "ManualCloudControlAssessmentDetails", "SimilarControls", "TargetResourceDetails", + "Trend", "EvaluationState", "FindingClass", + "FrameworkComplianceSummaryView", ) diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/common.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/common.py index 3d138a56faa0..027d62cc3936 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/common.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/common.py @@ -49,6 +49,7 @@ "Rule", "CELExpression", "OperationMetadata", + "Control", "ControlFamily", }, ) @@ -162,6 +163,14 @@ class CloudControlCategory(proto.Enum): CC_CATEGORY_BCDR (15): The business continuity and disaster recovery (BCDR) category. + CC_CATEGORY_ADMIN_ACCESS (16): + The admin access category. + CC_CATEGORY_DATA_RESIDENCY (17): + DRZ (Data Residency). + CC_CATEGORY_RESOURCE_USAGE_RESTRICTION (18): + RUR (Resource Usage Restriction). + CC_CATEGORY_SERVICE_SPECIFIC (19): + SERVICE SPECIFIC """ CLOUD_CONTROL_CATEGORY_UNSPECIFIED = 0 @@ -180,6 +189,10 @@ class CloudControlCategory(proto.Enum): CC_CATEGORY_VULNERABILITY_MANAGEMENT = 13 CC_CATEGORY_PRIVACY = 14 CC_CATEGORY_BCDR = 15 + CC_CATEGORY_ADMIN_ACCESS = 16 + CC_CATEGORY_DATA_RESIDENCY = 17 + CC_CATEGORY_RESOURCE_USAGE_RESTRICTION = 18 + CC_CATEGORY_SERVICE_SPECIFIC = 19 class CloudProvider(proto.Enum): @@ -329,9 +342,12 @@ class Framework(proto.Message): Attributes: name (str): - Required. 
Identifier. The name of the framework, in the - format - ``organizations/{organization}/locations/{location}/frameworks/{framework_id}``. + Required. Identifier. The name of the framework, in one of + the following formats: + ``organizations/{organization}/locations/{location}/frameworks/{framework}`` + or + ``projects/{project}/locations/{location}/frameworks/{framework}``. + The only supported location is ``global``. major_revision_id (int): Output only. The major version of the @@ -437,8 +453,12 @@ class CloudControlDetails(proto.Message): Attributes: name (str): - Required. The name of the cloud control, in the format - ``organizations/{organization}/locations/{location}/cloudControls/{cloud-control}``. + Required. The name of the cloud control, in one of the + following formats: + ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control}`` + or + ``projects/{project}/locations/{location}/cloudControls/{cloud_control}``. + The only supported location is ``global``. major_revision_id (int): Required. The major version of the cloud @@ -467,8 +487,11 @@ class CloudControlDetails(proto.Message): class FrameworkReference(proto.Message): - r"""The reference of a framework, in the format - ``organizations/{organization}/locations/{location}/frameworks/{framework}``. + r"""The reference of a framework, in one of the following formats: + + - ``organizations/{organization}/locations/{location}/frameworks/{framework}`` + - ``projects/{project}/locations/{location}/frameworks/{framework}``. + The only supported location is ``global``. @@ -528,9 +551,12 @@ class CloudControl(proto.Message): Attributes: name (str): - Required. Identifier. The name of the cloud control, in the - format - ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control_id}``. + Required. Identifier. 
The name of the cloud control, in + either of the formats: + ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control}`` + or + ``projects/{project}/locations/{location}/cloudControls/{cloud_control}``. + The only supported location is ``global``. major_revision_id (int): Output only. The major version of the cloud @@ -896,23 +922,23 @@ class ParamValue(proto.Message): Attributes: string_value (str): - A string value. + Optional. A string value. This field is a member of `oneof`_ ``kind``. bool_value (bool): - A boolean value. + Optional. A boolean value. This field is a member of `oneof`_ ``kind``. string_list_value (google.cloud.cloudsecuritycompliance_v1.types.StringList): - A repeated string. + Optional. A repeated string. This field is a member of `oneof`_ ``kind``. number_value (float): - A double value. + Optional. A double value. This field is a member of `oneof`_ ``kind``. oneof_value (google.cloud.cloudsecuritycompliance_v1.types.Parameter): - Sub-parameter values. + Optional. Sub-parameter values. This field is a member of `oneof`_ ``kind``. """ @@ -1144,6 +1170,182 @@ class OperationMetadata(proto.Message): ) +class Control(proto.Message): + r"""The regulatory control. + + Attributes: + name (str): + Output only. The name of a regulatory control, in one of the + following formats: + + - ``organizations/{organization}/locations/{location}/controls/{control}`` + - ``projects/{project}/locations/{location}/controls/{control}``. + + The only supported location is ``global``. + display_name (str): + Output only. The friendly name for the + regulatory control. + description (str): + Output only. The description of the + regulatory control. + family (google.cloud.cloudsecuritycompliance_v1.types.Control.Family): + Output only. The regulatory group that the + control belongs to. + control_family (google.cloud.cloudsecuritycompliance_v1.types.ControlFamily): + Output only. The regulatory family that the + control belongs to. 
+ responsibility_type (google.cloud.cloudsecuritycompliance_v1.types.RegulatoryControlResponsibilityType): + Output only. The entity that's responsible + for the control, whether Google, you as the + customer, or both. + google_responsibility_description (str): + Output only. A description of Google's + responsibility for the regulatory control. + google_responsibility_implementation (str): + Output only. A description of Google's + responsibility for implementing the regulatory + control. + customer_responsibility_description (str): + Output only. A description of your + responsibility for the regulatory control. + customer_responsibility_implementation (str): + Output only. A description of your + responsibility for implementing the regulatory + control. + shared_responsibility_description (str): + Output only. A description of the + responsibility that's shared between Google and + you in implementing this control. + additional_content_uri (str): + Output only. A link to the documentation + that's related to this control. + related_frameworks (MutableSequence[str]): + Output only. The frameworks that include this + control. + """ + + class Family(proto.Enum): + r"""The regulatory control family. + + Values: + FAMILY_UNSPECIFIED (0): + Default value. This value is unused. 
+ AC (1): + Access control + AT (2): + Awareness and training + AU (3): + Audit and accountability + CA (4): + Certification, accreditation, and security + assessments + CM (5): + Configuration management + CP (6): + Contingency planning + IA (7): + Identification and authentication + IR (8): + Incident response + MA (9): + Maintenance + MP (10): + Media protection + PE (11): + Physical and environmental protection + PL (12): + Security planning + PS (13): + Personnel security + RA (14): + Risk assessment + SA (15): + System services and acquisition + SC (16): + System and communications protection + SI (17): + System and information integrity + SR (18): + Supply chain risk management + """ + + FAMILY_UNSPECIFIED = 0 + AC = 1 + AT = 2 + AU = 3 + CA = 4 + CM = 5 + CP = 6 + IA = 7 + IR = 8 + MA = 9 + MP = 10 + PE = 11 + PL = 12 + PS = 13 + RA = 14 + SA = 15 + SC = 16 + SI = 17 + SR = 18 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=3, + ) + description: str = proto.Field( + proto.STRING, + number=4, + ) + family: Family = proto.Field( + proto.ENUM, + number=5, + enum=Family, + ) + control_family: "ControlFamily" = proto.Field( + proto.MESSAGE, + number=6, + message="ControlFamily", + ) + responsibility_type: "RegulatoryControlResponsibilityType" = proto.Field( + proto.ENUM, + number=7, + enum="RegulatoryControlResponsibilityType", + ) + google_responsibility_description: str = proto.Field( + proto.STRING, + number=8, + ) + google_responsibility_implementation: str = proto.Field( + proto.STRING, + number=9, + ) + customer_responsibility_description: str = proto.Field( + proto.STRING, + number=10, + ) + customer_responsibility_implementation: str = proto.Field( + proto.STRING, + number=11, + ) + shared_responsibility_description: str = proto.Field( + proto.STRING, + number=12, + ) + additional_content_uri: str = proto.Field( + proto.STRING, + number=13, + ) + related_frameworks: 
MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=14, + ) + + class ControlFamily(proto.Message): r"""The regulatory family of the control. diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/config.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/config.py index 03b81dd052c4..1b17ee877cde 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/config.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/config.py @@ -46,9 +46,13 @@ class ListFrameworksRequest(proto.Message): Attributes: parent (str): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. The - only supported location is ``global``. + Required. The parent resource name, in one of the following + formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + + The only supported location is ``global``. page_size (int): Optional. The maximum number of frameworks to return. The default value is ``500``. @@ -108,9 +112,12 @@ class GetFrameworkRequest(proto.Message): Attributes: name (str): - Required. The name of the framework to retrieve, in the - format - ``organizations/{organization}/locations/{location}/frameworks/{framework_id}`` + Required. The name of the framework to retrieve, in one of + the following formats: + ``organizations/{organization}/locations/{location}/frameworks/{framework}`` + or + ``projects/{project}/locations/{location}/frameworks/{framework}``. + The only supported location is ``global``. major_revision_id (int): Optional. The framework major version to retrieve. If not @@ -133,9 +140,13 @@ class CreateFrameworkRequest(proto.Message): Attributes: parent (str): - Required. 
The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. The - only supported location is ``global``. + Required. The parent resource name, in one of the following + formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + + The only supported location is ``global``. framework_id (str): Required. The identifier (ID) of the framework. The ID is not the full name of the @@ -199,8 +210,12 @@ class DeleteFrameworkRequest(proto.Message): Attributes: name (str): - Required. The name of the resource, in the format - ``organizations/{organization}/locations/{location}/frameworks/{framework}``. + Required. The name of the resource, in one of the following + formats: + ``organizations/{organization}/locations/{location}/frameworks/{framework}`` + or + ``projects/{project}/locations/{location}/frameworks/{framework}``. + The only supported location is ``global``. """ @@ -215,9 +230,13 @@ class ListCloudControlsRequest(proto.Message): Attributes: parent (str): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. The - only supported location is ``global``. + Required. The parent resource name, in one of the following + formats: + + - ``organizations/{organization}/locations/{location}`` + - ``projects/{project}/locations/{location}``. + + The only supported location is ``global``. page_size (int): Optional. The maximum number of cloud controls to return. The default value is ``500``. @@ -280,9 +299,12 @@ class GetCloudControlRequest(proto.Message): Attributes: name (str): - Required. The name of the cloud control to retrieve, in the - format - ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control}``. + Required. 
The name of the cloud control to retrieve, in one + of the following formats: + ``organizations/{organization}/locations/{location}/cloudControls/{cloud_control}`` + or + ``projects/{project}/locations/{location}/cloudControls/{cloud_control}``. + The only supported location is ``global``. major_revision_id (int): Optional. The major version of the cloud control to @@ -305,9 +327,13 @@ class CreateCloudControlRequest(proto.Message): Attributes: parent (str): - Required. The parent resource name, in the format - ``organizations/{organization}/locations/{location}``. The - only supported location is ``global``. + Required. The parent resource name, in one of the following + formats: + + - ``organizations/{organization}/locations/{location}``. + - ``projects/{project}/locations/{location}``. + + The only supported location is ``global``. cloud_control_id (str): Required. The identifier for the cloud control, which is the last segment of the cloud control name. The format is @@ -373,9 +399,12 @@ class DeleteCloudControlRequest(proto.Message): Attributes: name (str): - Required. The name of the cloud control to delete, in the - format - ``organizations/{organization}/locations/{location}/CloudControls/{CloudControl}``. + Required. The name of the cloud control to delete, in one of + the following formats: + ``organizations/{organization}/locations/{location}/CloudControls/{CloudControl}`` + or + ``projects/{project}/locations/{location}/CloudControls/{CloudControl}``. + The only supported location is ``global``. 
""" diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/deployment.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/deployment.py index 554cda6f4f39..6a134833fefe 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/deployment.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/deployment.py @@ -59,6 +59,8 @@ class DeploymentState(proto.Enum): Deployment is being created. DEPLOYMENT_STATE_DELETING (3): Deployment is being deleted. + DEPLOYMENT_STATE_UPDATING (8): + Deployment is being updated. DEPLOYMENT_STATE_FAILED (4): Deployment has failed. All the changes made by the deployment were successfully rolled back. @@ -82,6 +84,7 @@ class DeploymentState(proto.Enum): DEPLOYMENT_STATE_VALIDATING = 1 DEPLOYMENT_STATE_CREATING = 2 DEPLOYMENT_STATE_DELETING = 3 + DEPLOYMENT_STATE_UPDATING = 8 DEPLOYMENT_STATE_FAILED = 4 DEPLOYMENT_STATE_READY = 5 DEPLOYMENT_STATE_PARTIALLY_DEPLOYED = 6 @@ -97,7 +100,9 @@ class FrameworkDeployment(proto.Message): name (str): Identifier. The name of the framework deployment, in the format - ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment_id}``. + ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}`` + or + ``projects/{project}/locations/{location}/frameworkDeployments/{framework_deployment}``. The only supported location is ``global``. target_resource_config (google.cloud.cloudsecuritycompliance_v1.types.TargetResourceConfig): Required. The details of the target resource @@ -235,7 +240,9 @@ class CloudControlDeployment(proto.Message): name (str): Identifier. The name for the cloud control deployment, in the format - ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment_id}``. 
+ ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}`` + or + ``projects/{project}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}``. The only supported location is ``global``. target_resource_config (google.cloud.cloudsecuritycompliance_v1.types.TargetResourceConfig): Required. The details of the target resource @@ -507,8 +514,9 @@ class CreateFrameworkDeploymentRequest(proto.Message): parent (str): Required. The parent resource of the framework deployment in the format - ``organizations/{organization}/locations/{location}``. Only - the global location is supported. + ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. Only the global + location is supported. framework_deployment_id (str): Optional. An identifier for the framework deployment that's unique in scope of the parent. @@ -541,7 +549,9 @@ class DeleteFrameworkDeploymentRequest(proto.Message): name (str): Required. The name of the framework deployment that you want to delete, in the format - ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment_id}``. + ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}`` + or + ``projects/{project}/locations/{location}/frameworkDeployments/{framework_deployment}``. The only supported location is ``global``. etag (str): Optional. An opaque identifier for the current version of @@ -572,7 +582,9 @@ class GetFrameworkDeploymentRequest(proto.Message): name (str): Required. The name of the framework deployment, in the format - ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment_id}``. + ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}`` + or + ``projects/{project}/locations/{location}/frameworkDeployments/{framework_deployment}``. The only supported location is ``global``. 
""" @@ -589,8 +601,9 @@ class ListFrameworkDeploymentsRequest(proto.Message): parent (str): Required. The parent resource of the framework deployment, in the format - ``organizations/{organization}/locations/{location}``. The - only supported location is ``global``. + ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. The only + supported location is ``global``. page_size (int): Optional. The requested page size. The server might return fewer items than requested. @@ -669,7 +682,9 @@ class GetCloudControlDeploymentRequest(proto.Message): name (str): Required. The name for the cloud control deployment, in the format - ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment_id}``. + ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}`` + or + ``projects/{project}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}``. The only supported location is ``global``. """ @@ -686,8 +701,9 @@ class ListCloudControlDeploymentsRequest(proto.Message): parent (str): Required. The parent resource for the cloud control deployment, in the format - ``organizations/{organization}/locations/{location}``. The - only supported location is ``global``. + ``organizations/{organization}/locations/{location}`` or + ``projects/{project}/locations/{location}``. The only + supported location is ``global``. page_size (int): Optional. The requested page size. The server might return fewer items than you requested. @@ -767,7 +783,9 @@ class CloudControlDeploymentReference(proto.Message): cloud_control_deployment (str): Output only. The name of the CloudControlDeployment. The format is - ``organizations/{org}/locations/{location}/cloudControlDeployments/{cloud_control_deployment_id}``. 
+ ``organizations/{organization}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}`` + or + ``projects/{project}/locations/{location}/cloudControlDeployments/{cloud_control_deployment}``. The only supported location is ``global``. """ @@ -784,7 +802,9 @@ class FrameworkDeploymentReference(proto.Message): framework_deployment (str): Output only. The name of the framework deployment, in the format - ``organizations/{org}/locations/{location}/frameworkDeployments/{framework_deployment_id}``. + ``organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}`` + or + ``projects/{project}/locations/{location}/frameworkDeployments/{framework_deployment}``. The only supported location is ``global``. framework_reference (google.cloud.cloudsecuritycompliance_v1.types.FrameworkReference): Optional. The reference to the framework that this @@ -794,7 +814,7 @@ class FrameworkDeploymentReference(proto.Message): { framework: - "organizations/{org}/locations/{location}/frameworks/{framework}", + "organizations/{organization}/locations/{location}/frameworks/{framework}", major_revision_id: 1 } diff --git a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/monitoring.py b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/monitoring.py index 05c7f5ef108c..b5ca28ddcf52 100644 --- a/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/monitoring.py +++ b/packages/google-cloud-cloudsecuritycompliance/google/cloud/cloudsecuritycompliance_v1/types/monitoring.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +import google.protobuf.duration_pb2 as duration_pb2 # type: ignore import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.type.interval_pb2 as interval_pb2 # type: ignore import proto # type: ignore @@ -28,6 +29,7 @@ manifest={ "EvaluationState", "FindingClass", 
+ "FrameworkComplianceSummaryView", "ListFrameworkComplianceSummariesRequest", "ListFrameworkComplianceSummariesResponse", "FrameworkComplianceReport", @@ -48,6 +50,7 @@ "SimilarControls", "AggregatedComplianceReport", "TargetResourceDetails", + "Trend", }, ) @@ -122,6 +125,30 @@ class FindingClass(proto.Enum): CHOKEPOINT = 9 +class FrameworkComplianceSummaryView(proto.Enum): + r"""Specifies the view of the framework compliance summary to be + returned. New values may be added in the future. + + Values: + FRAMEWORK_COMPLIANCE_SUMMARY_VIEW_UNSPECIFIED (0): + The default / unset value. The API will + default to the BASIC view. + FRAMEWORK_COMPLIANCE_SUMMARY_VIEW_BASIC (1): + Includes basic compliance metadata, but omits + trend data. + FRAMEWORK_COMPLIANCE_SUMMARY_VIEW_FULL (2): + Includes all information, including + [finding_count][google.cloud.cloudsecuritycompliance.v1main.FrameworkComplianceSummary.finding_count] + and + [controls_passing_trend][google.cloud.cloudsecuritycompliance.v1main.FrameworkComplianceSummary.controls_passing_trend]. + Trend data is provided for the last 30 days. + """ + + FRAMEWORK_COMPLIANCE_SUMMARY_VIEW_UNSPECIFIED = 0 + FRAMEWORK_COMPLIANCE_SUMMARY_VIEW_BASIC = 1 + FRAMEWORK_COMPLIANCE_SUMMARY_VIEW_FULL = 2 + + class ListFrameworkComplianceSummariesRequest(proto.Message): r"""The request message for [ListFrameworkComplianceSummariesRequest][google.cloud.cloudsecuritycompliance.v1.ListFrameworkComplianceSummariesRequest]. @@ -140,6 +167,9 @@ class ListFrameworkComplianceSummariesRequest(proto.Message): results that the server should return. filter (str): Optional. The filtering results. + view (google.cloud.cloudsecuritycompliance_v1.types.FrameworkComplianceSummaryView): + Optional. Specifies the level of detail to + return in the response. 
""" parent: str = proto.Field( @@ -158,6 +188,11 @@ class ListFrameworkComplianceSummariesRequest(proto.Message): proto.STRING, number=4, ) + view: "FrameworkComplianceSummaryView" = proto.Field( + proto.ENUM, + number=5, + enum="FrameworkComplianceSummaryView", + ) class ListFrameworkComplianceSummariesResponse(proto.Message): @@ -296,6 +331,8 @@ class FetchFrameworkComplianceReportRequest(proto.Message): compliance report to retrieve. end_time (google.protobuf.timestamp_pb2.Timestamp): Optional. The end time of the report. + filter (str): + Optional. The filtering results. """ name: str = proto.Field( @@ -307,6 +344,10 @@ class FetchFrameworkComplianceReportRequest(proto.Message): number=2, message=timestamp_pb2.Timestamp, ) + filter: str = proto.Field( + proto.STRING, + number=3, + ) class ListFindingSummariesRequest(proto.Message): @@ -566,6 +607,12 @@ class FrameworkComplianceSummary(proto.Message): target_resource_details (MutableSequence[google.cloud.cloudsecuritycompliance_v1.types.TargetResourceDetails]): The target resource details for the framework. + finding_count (int): + Output only. The count of the findings + generated against the framework. + controls_passing_trend (google.cloud.cloudsecuritycompliance_v1.types.Trend): + Output only. The trend of controls that are + passing for the given duration. """ framework: str = proto.Field( @@ -619,6 +666,15 @@ class FrameworkComplianceSummary(proto.Message): message="TargetResourceDetails", ) ) + finding_count: int = proto.Field( + proto.INT64, + number=11, + ) + controls_passing_trend: "Trend" = proto.Field( + proto.MESSAGE, + number=12, + message="Trend", + ) class FindingSummary(proto.Message): @@ -1033,4 +1089,26 @@ class TargetResourceDetails(proto.Message): ) +class Trend(proto.Message): + r"""The trend of a compliance metric. + + Attributes: + duration (google.protobuf.duration_pb2.Duration): + Output only. The duration for the trend. + value_percent (float): + Output only. 
The trend value as a percentage. + The value can be positive or negative. + """ + + duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + value_percent: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_deployment.py b/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_deployment.py index 7c7006eaf873..b7d4b804bdaa 100644 --- a/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_deployment.py +++ b/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_deployment.py @@ -7600,10 +7600,36 @@ def test_parse_cloud_control_deployment_path(): assert expected == actual -def test_framework_deployment_path(): +def test_framework_path(): organization = "squid" location = "clam" - framework_deployment = "whelk" + framework = "whelk" + expected = "organizations/{organization}/locations/{location}/frameworks/{framework}".format( + organization=organization, + location=location, + framework=framework, + ) + actual = DeploymentClient.framework_path(organization, location, framework) + assert expected == actual + + +def test_parse_framework_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "framework": "nudibranch", + } + path = DeploymentClient.framework_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DeploymentClient.parse_framework_path(path) + assert expected == actual + + +def test_framework_deployment_path(): + organization = "cuttlefish" + location = "mussel" + framework_deployment = "winkle" expected = "organizations/{organization}/locations/{location}/frameworkDeployments/{framework_deployment}".format( organization=organization, location=location, @@ -7617,9 +7643,9 @@ def test_framework_deployment_path(): def test_parse_framework_deployment_path(): expected = { - "organization": "octopus", - "location": "oyster", - "framework_deployment": "nudibranch", + "organization": "nautilus", + "location": "scallop", + "framework_deployment": "abalone", } path = DeploymentClient.framework_deployment_path(**expected) @@ -7629,7 +7655,7 @@ def test_parse_framework_deployment_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "squid" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -7639,7 +7665,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "clam", } path = DeploymentClient.common_billing_account_path(**expected) @@ -7649,7 +7675,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "whelk" expected = "folders/{folder}".format( folder=folder, ) @@ -7659,7 +7685,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "octopus", } path = DeploymentClient.common_folder_path(**expected) @@ -7669,7 +7695,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "oyster" expected = "organizations/{organization}".format( organization=organization, ) @@ -7679,7 +7705,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - 
"organization": "abalone", + "organization": "nudibranch", } path = DeploymentClient.common_organization_path(**expected) @@ -7689,7 +7715,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "cuttlefish" expected = "projects/{project}".format( project=project, ) @@ -7699,7 +7725,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "mussel", } path = DeploymentClient.common_project_path(**expected) @@ -7709,8 +7735,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "winkle" + location = "nautilus" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -7721,8 +7747,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "scallop", + "location": "abalone", } path = DeploymentClient.common_location_path(**expected) diff --git a/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_monitoring.py b/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_monitoring.py index f3af1de65ee2..a8703a40e2db 100644 --- a/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_monitoring.py +++ b/packages/google-cloud-cloudsecuritycompliance/tests/unit/gapic/cloudsecuritycompliance_v1/test_monitoring.py @@ -2447,6 +2447,7 @@ def test_fetch_framework_compliance_report_non_empty_request_with_auto_populated # if they meet the requirements of AIP 4235. request = monitoring.FetchFrameworkComplianceReportRequest( name="name_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2461,6 +2462,7 @@ def test_fetch_framework_compliance_report_non_empty_request_with_auto_populated _, args, _ = call.mock_calls[0] assert args[0] == monitoring.FetchFrameworkComplianceReportRequest( name="name_value", + filter="filter_value", ) @@ -3725,6 +3727,7 @@ def test_list_framework_compliance_summaries_rest_required_fields( "filter", "page_size", "page_token", + "view", ) ) jsonified_request.update(unset_fields) @@ -3791,6 +3794,7 @@ def test_list_framework_compliance_summaries_rest_unset_required_fields(): "filter", "pageSize", "pageToken", + "view", ) ) & set(("parent",)) @@ -4261,7 +4265,12 @@ def test_fetch_framework_compliance_report_rest_required_fields( credentials=ga_credentials.AnonymousCredentials() ).fetch_framework_compliance_report._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("end_time",)) + assert not set(unset_fields) - set( + ( + "end_time", + "filter", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -4318,7 +4327,15 @@ def test_fetch_framework_compliance_report_rest_unset_required_fields(): unset_fields = ( transport.fetch_framework_compliance_report._get_unset_required_fields({}) ) - assert set(unset_fields) == (set(("endTime",)) & set(("name",))) + assert set(unset_fields) == ( + set( + ( + "endTime", + "filter", + ) + ) + & set(("name",)) + ) def test_fetch_framework_compliance_report_rest_flattened(): diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/__init__.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/__init__.py index 519e31e79a41..15899d14fc1c 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/__init__.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/__init__.py @@ -392,11 +392,14 @@ Commitment, CommitmentAggregatedList, CommitmentList, + CommitmentParams, 
CommitmentResourceStatus, CommitmentsScopedList, CompositeHealthCheck, CompositeHealthCheckAggregatedList, + CompositeHealthCheckHealth, CompositeHealthCheckList, + CompositeHealthChecksGetHealthResponseHealthSourceHealth, CompositeHealthChecksScopedList, ConfidentialInstanceConfig, ConfigureAcceleratorTopologiesInstanceGroupManagerRequest, @@ -565,6 +568,7 @@ DiskTypeAggregatedList, DiskTypeList, DiskTypesScopedList, + DiskUpdateKmsKeyRequest, DisplayDevice, DistributionPolicy, DistributionPolicyZoneConfiguration, @@ -607,6 +611,7 @@ ForwardingRulesScopedList, FutureReservation, FutureReservationCommitmentInfo, + FutureReservationParams, FutureReservationsAggregatedListResponse, FutureReservationsListResponse, FutureReservationSpecificSKUProperties, @@ -663,6 +668,8 @@ GetHealthBackendServiceRequest, GetHealthCheckRequest, GetHealthRegionBackendServiceRequest, + GetHealthRegionCompositeHealthCheckRequest, + GetHealthRegionHealthSourceRequest, GetHealthTargetPoolRequest, GetIamPolicyBackendBucketRequest, GetIamPolicyBackendServiceRequest, @@ -870,7 +877,10 @@ HealthChecksScopedList, HealthSource, HealthSourceAggregatedList, + HealthSourceHealth, HealthSourceList, + HealthSourcesGetHealthResponseSourceInfo, + HealthSourcesGetHealthResponseSourceInfoBackendInfo, HealthSourcesScopedList, HealthStatus, HealthStatusForNetworkEndpoint, @@ -1023,6 +1033,7 @@ InstanceGroupManagersAbandonInstancesRequest, InstanceGroupManagersApplyUpdatesRequest, InstanceGroupManagersConfigureAcceleratorTopologiesRequest, + InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration, InstanceGroupManagersCreateInstancesRequest, InstanceGroupManagersDeleteInstancesRequest, InstanceGroupManagersDeletePerInstanceConfigsReq, @@ -1618,6 +1629,7 @@ RegionDisksResizeRequest, RegionDisksStartAsyncReplicationRequest, RegionDiskTypeList, + RegionDiskUpdateKmsKeyRequest, RegionInstanceGroupList, RegionInstanceGroupManagerDeleteInstanceConfigReq, 
RegionInstanceGroupManagerList, @@ -1651,6 +1663,7 @@ RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, RegionSetLabelsRequest, RegionSetPolicyRequest, + RegionSnapshotUpdateKmsKeyRequest, RegionTargetHttpsProxiesSetSslCertificatesRequest, RegionUrlMapsValidateRequest, RemoveAssociationFirewallPolicyRequest, @@ -1764,6 +1777,7 @@ RolloutsListResponse, RolloutWaveDetails, RolloutWaveDetailsOrchestratedWaveDetails, + RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus, Route, RouteAsPath, RouteList, @@ -1991,6 +2005,7 @@ SnapshotSettingsStorageLocationSettings, SnapshotSettingsStorageLocationSettingsStorageLocationPreference, SnapshotsScopedList, + SnapshotUpdateKmsKeyRequest, SourceDiskEncryptionKey, SourceInstanceParams, SourceInstanceProperties, @@ -2201,6 +2216,10 @@ UpdateHealthCheckRequest, UpdateInstanceGroupManagerRequest, UpdateInstanceRequest, + UpdateKmsKeyDiskRequest, + UpdateKmsKeyRegionDiskRequest, + UpdateKmsKeyRegionSnapshotRequest, + UpdateKmsKeySnapshotRequest, UpdateLicenseRequest, UpdateNamedSetRouterRequest, UpdateNetworkInterfaceInstanceRequest, @@ -2227,9 +2246,11 @@ UpdateZoneVmExtensionPolicyRequest, UrlMap, UrlMapList, + UrlMapQuotaUsage, UrlMapReference, UrlMapsAggregatedList, UrlMapsScopedList, + UrlMapStatus, UrlMapsValidateRequest, UrlMapsValidateResponse, UrlMapTest, @@ -2626,11 +2647,14 @@ def _get_version(dependency_name): "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentParams", "CommitmentResourceStatus", "CommitmentsScopedList", "CompositeHealthCheck", "CompositeHealthCheckAggregatedList", + "CompositeHealthCheckHealth", "CompositeHealthCheckList", + "CompositeHealthChecksGetHealthResponseHealthSourceHealth", "CompositeHealthChecksScopedList", "ConfidentialInstanceConfig", "ConfigureAcceleratorTopologiesInstanceGroupManagerRequest", @@ -2796,6 +2820,7 @@ def _get_version(dependency_name): "DiskTypeList", "DiskTypesClient", "DiskTypesScopedList", + 
"DiskUpdateKmsKeyRequest", "DisksAddResourcePoliciesRequest", "DisksClient", "DisksRemoveResourcePoliciesRequest", @@ -2849,6 +2874,7 @@ def _get_version(dependency_name): "ForwardingRulesScopedList", "FutureReservation", "FutureReservationCommitmentInfo", + "FutureReservationParams", "FutureReservationSpecificSKUProperties", "FutureReservationStatus", "FutureReservationStatusExistingMatchingUsageInfo", @@ -2908,6 +2934,8 @@ def _get_version(dependency_name): "GetHealthBackendServiceRequest", "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", + "GetHealthRegionCompositeHealthCheckRequest", + "GetHealthRegionHealthSourceRequest", "GetHealthTargetPoolRequest", "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", @@ -3124,7 +3152,10 @@ def _get_version(dependency_name): "HealthChecksScopedList", "HealthSource", "HealthSourceAggregatedList", + "HealthSourceHealth", "HealthSourceList", + "HealthSourcesGetHealthResponseSourceInfo", + "HealthSourcesGetHealthResponseSourceInfoBackendInfo", "HealthSourcesScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", @@ -3292,6 +3323,7 @@ def _get_version(dependency_name): "InstanceGroupManagersApplyUpdatesRequest", "InstanceGroupManagersClient", "InstanceGroupManagersConfigureAcceleratorTopologiesRequest", + "InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration", "InstanceGroupManagersCreateInstancesRequest", "InstanceGroupManagersDeleteInstancesRequest", "InstanceGroupManagersDeletePerInstanceConfigsReq", @@ -3907,6 +3939,7 @@ def _get_version(dependency_name): "RegionDiskSettingsClient", "RegionDiskTypeList", "RegionDiskTypesClient", + "RegionDiskUpdateKmsKeyRequest", "RegionDisksAddResourcePoliciesRequest", "RegionDisksClient", "RegionDisksRemoveResourcePoliciesRequest", @@ -3965,6 +3998,7 @@ def _get_version(dependency_name): "RegionSetLabelsRequest", "RegionSetPolicyRequest", "RegionSnapshotSettingsClient", + "RegionSnapshotUpdateKmsKeyRequest", 
"RegionSnapshotsClient", "RegionSslCertificatesClient", "RegionSslPoliciesClient", @@ -4092,6 +4126,7 @@ def _get_version(dependency_name): "RolloutRolloutEntityOrchestratedEntity", "RolloutWaveDetails", "RolloutWaveDetailsOrchestratedWaveDetails", + "RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus", "RolloutsClient", "RolloutsListResponse", "Route", @@ -4327,6 +4362,7 @@ def _get_version(dependency_name): "SnapshotSettingsServiceClient", "SnapshotSettingsStorageLocationSettings", "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", + "SnapshotUpdateKmsKeyRequest", "SnapshotsClient", "SnapshotsScopedList", "SourceDiskEncryptionKey", @@ -4551,6 +4587,10 @@ def _get_version(dependency_name): "UpdateHealthCheckRequest", "UpdateInstanceGroupManagerRequest", "UpdateInstanceRequest", + "UpdateKmsKeyDiskRequest", + "UpdateKmsKeyRegionDiskRequest", + "UpdateKmsKeyRegionSnapshotRequest", + "UpdateKmsKeySnapshotRequest", "UpdateLicenseRequest", "UpdateNamedSetRouterRequest", "UpdateNetworkInterfaceInstanceRequest", @@ -4577,7 +4617,9 @@ def _get_version(dependency_name): "UpdateZoneVmExtensionPolicyRequest", "UrlMap", "UrlMapList", + "UrlMapQuotaUsage", "UrlMapReference", + "UrlMapStatus", "UrlMapTest", "UrlMapTestHeader", "UrlMapValidationResult", diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/gapic_metadata.json b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/gapic_metadata.json index dfd1b7e34bec..e69e2ffa5b72 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/gapic_metadata.json +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/gapic_metadata.json @@ -490,6 +490,11 @@ "methods": [ "update" ] + }, + "UpdateKmsKey": { + "methods": [ + "update_kms_key" + ] } } } @@ -3368,6 +3373,11 @@ "get" ] }, + "GetHealth": { + "methods": [ + "get_health" + ] + }, "Insert": { "methods": [ "insert" @@ -3519,6 +3529,11 @@ "methods": [ "update" ] + }, + "UpdateKmsKey": { 
+ "methods": [ + "update_kms_key" + ] } } } @@ -3676,6 +3691,11 @@ "get" ] }, + "GetHealth": { + "methods": [ + "get_health" + ] + }, "Insert": { "methods": [ "insert" @@ -4508,6 +4528,11 @@ "methods": [ "test_iam_permissions" ] + }, + "UpdateKmsKey": { + "methods": [ + "update_kms_key" + ] } } } @@ -5491,6 +5516,11 @@ "methods": [ "test_iam_permissions" ] + }, + "UpdateKmsKey": { + "methods": [ + "update_kms_key" + ] } } } diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/client.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/client.py index 45d6fe58ae2e..25f2d0778262 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/client.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/client.py @@ -5642,6 +5642,329 @@ def error_code(self): # Done; return the response. return response + def update_kms_key_unary( + self, + request: Optional[Union[compute.UpdateKmsKeyDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disk_update_kms_key_request_resource: Optional[ + compute.DiskUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_update_kms_key(): + # Create a client + client = compute_v1beta.DisksClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.UpdateKmsKeyDiskRequest, dict]): + The request object. A request message for + Disks.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the Disk resource, should + conform to RFC1035. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_update_kms_key_request_resource (google.cloud.compute_v1beta.types.DiskUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``disk_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, disk, disk_update_kms_key_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyDiskRequest): + request = compute.UpdateKmsKeyDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disk_update_kms_key_request_resource is not None: + request.disk_update_kms_key_request_resource = ( + disk_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_kms_key( + self, + request: Optional[Union[compute.UpdateKmsKeyDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + disk: Optional[str] = None, + disk_update_kms_key_request_resource: Optional[ + compute.DiskUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_update_kms_key(): + # Create a client + client = compute_v1beta.DisksClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.UpdateKmsKeyDiskRequest, dict]): + The request object. A request message for + Disks.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the Disk resource, should + conform to RFC1035. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_update_kms_key_request_resource (google.cloud.compute_v1beta.types.DiskUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``disk_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, disk, disk_update_kms_key_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, compute.UpdateKmsKeyDiskRequest): + request = compute.UpdateKmsKeyDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disk_update_kms_key_request_resource is not None: + request.disk_update_kms_key_request_resource = ( + disk_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def __enter__(self) -> "DisksClient": return self diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/base.py index 7811d4c13281..d146277dcd57 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/base.py @@ -282,6 +282,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_kms_key: gapic_v1.method.wrap_method( + self.update_kms_key, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -464,6 +469,15 @@ def update( ]: raise NotImplementedError() + @property + def update_kms_key( + self, + ) -> Callable[ + [compute.UpdateKmsKeyDiskRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/rest.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/rest.py index c9862884f992..068786ed4252 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/rest.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/rest.py @@ -224,6 +224,14 @@ def post_update(self, response): 
logging.log(f"Received response: {response}") return response + def pre_update_kms_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_kms_key(self, response): + logging.log(f"Received response: {response}") + return response + transport = DisksRestTransport(interceptor=MyCustomDisksInterceptor()) client = DisksClient(transport=transport) @@ -1105,6 +1113,52 @@ def post_update_with_metadata( """ return response, metadata + def pre_update_kms_key( + self, + request: compute.UpdateKmsKeyDiskRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateKmsKeyDiskRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_kms_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_update_kms_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_kms_key + + DEPRECATED. Please use the `post_update_kms_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Disks server but before + it is returned to user code. This `post_update_kms_key` interceptor runs + before the `post_update_kms_key_with_metadata` interceptor. + """ + return response + + def post_update_kms_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_kms_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_update_kms_key_with_metadata` + interceptor in new development instead of the `post_update_kms_key` interceptor. 
+ When both interceptors are used, this `post_update_kms_key_with_metadata` interceptor runs after the + `post_update_kms_key` interceptor. The (possibly modified) response returned by + `post_update_kms_key` will be passed to + `post_update_kms_key_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class DisksRestStub: @@ -4590,6 +4644,186 @@ def __call__( ) return resp + class _UpdateKmsKey(_BaseDisksRestTransport._BaseUpdateKmsKey, DisksRestStub): + def __hash__(self): + return hash("DisksRestTransport.UpdateKmsKey") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateKmsKeyDiskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update kms key method over HTTP. + + Args: + request (~.compute.UpdateKmsKeyDiskRequest): + The request object. A request message for + Disks.UpdateKmsKey. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. + + """ + + http_options = _BaseDisksRestTransport._BaseUpdateKmsKey._get_http_options() + + request, metadata = self._interceptor.pre_update_kms_key(request, metadata) + transcoded_request = ( + _BaseDisksRestTransport._BaseUpdateKmsKey._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseDisksRestTransport._BaseUpdateKmsKey._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseDisksRestTransport._BaseUpdateKmsKey._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1beta.DisksClient.UpdateKmsKey", + extra={ + "serviceName": "google.cloud.compute.v1beta.Disks", + "rpcName": "UpdateKmsKey", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send 
the request + response = DisksRestTransport._UpdateKmsKey._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_kms_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_kms_key_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1beta.DisksClient.update_kms_key", + extra={ + "serviceName": "google.cloud.compute.v1beta.Disks", + "rpcName": "UpdateKmsKey", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def add_resource_policies( self, @@ -4734,6 +4968,14 @@ def update(self) -> Callable[[compute.UpdateDiskRequest], compute.Operation]: # In C++ this would require a dynamic_cast return self._Update(self._session, self._host, self._interceptor) # type: ignore + @property + def update_kms_key( + self, + ) -> Callable[[compute.UpdateKmsKeyDiskRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateKmsKey(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/rest_base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/rest_base.py index f0b1a99f1adf..c146079d4460 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/rest_base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/disks/transports/rest_base.py @@ -1091,5 +1091,61 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseUpdateKmsKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/beta/projects/{project}/zones/{zone}/disks/{disk}/updateKmsKey", + "body": "disk_update_kms_key_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateKmsKeyDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseDisksRestTransport._BaseUpdateKmsKey._get_unset_required_fields( + query_params + ) + ) + + return query_params + __all__ = ("_BaseDisksRestTransport",) diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/client.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/client.py index 20192ea81f12..a54520a1fbf9 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/client.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/client.py @@ -1297,6 +1297,146 @@ def sample_get(): # Done; return the response. return response + def get_health( + self, + request: Optional[ + Union[compute.GetHealthRegionCompositeHealthCheckRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + composite_health_check: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.CompositeHealthCheckHealth: + r"""Gets the most recent health check results for this + regional CompositeHealthCheck. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_get_health(): + # Create a client + client = compute_v1beta.RegionCompositeHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1beta.GetHealthRegionCompositeHealthCheckRequest( + composite_health_check="composite_health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.GetHealthRegionCompositeHealthCheckRequest, dict]): + The request object. A request message for + RegionCompositeHealthChecks.GetHealth. + See the method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + composite_health_check (str): + Name of the CompositeHealthCheck + resource to get health for. + + This corresponds to the ``composite_health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.compute_v1beta.types.CompositeHealthCheckHealth: + Response message for + RegionCompositeHealthChecks.GetHealth + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, composite_health_check] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetHealthRegionCompositeHealthCheckRequest): + request = compute.GetHealthRegionCompositeHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if composite_health_check is not None: + request.composite_health_check = composite_health_check + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_health] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("composite_health_check", request.composite_health_check), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def insert_unary( self, request: Optional[ diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/base.py index 5b5dcf9b1015..6785ac8d4c5b 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/base.py @@ -182,6 +182,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_health: gapic_v1.method.wrap_method( + self.get_health, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), self.insert: gapic_v1.method.wrap_method( self.insert, default_timeout=600.0, @@ -253,6 +268,18 @@ def get( ]: raise NotImplementedError() + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionCompositeHealthCheckRequest], + Union[ + compute.CompositeHealthCheckHealth, + Awaitable[compute.CompositeHealthCheckHealth], + ], + ]: + raise NotImplementedError() + @property def insert( self, diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/rest.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/rest.py index d744772076d3..f90aee065bb2 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/rest.py +++ 
b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/rest.py @@ -96,6 +96,14 @@ def post_get(self, response): logging.log(f"Received response: {response}") return response + def pre_get_health(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_health(self, response): + logging.log(f"Received response: {response}") + return response + def pre_insert(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -282,6 +290,57 @@ def post_get_with_metadata( """ return response, metadata + def pre_get_health( + self, + request: compute.GetHealthRegionCompositeHealthCheckRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetHealthRegionCompositeHealthCheckRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_health + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionCompositeHealthChecks server. + """ + return request, metadata + + def post_get_health( + self, response: compute.CompositeHealthCheckHealth + ) -> compute.CompositeHealthCheckHealth: + """Post-rpc interceptor for get_health + + DEPRECATED. Please use the `post_get_health_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionCompositeHealthChecks server but before + it is returned to user code. This `post_get_health` interceptor runs + before the `post_get_health_with_metadata` interceptor. 
+ """ + return response + + def post_get_health_with_metadata( + self, + response: compute.CompositeHealthCheckHealth, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.CompositeHealthCheckHealth, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_health + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionCompositeHealthChecks server but before it is returned to user code. + + We recommend only using this `post_get_health_with_metadata` + interceptor in new development instead of the `post_get_health` interceptor. + When both interceptors are used, this `post_get_health_with_metadata` interceptor runs after the + `post_get_health` interceptor. The (possibly modified) response returned by + `post_get_health` will be passed to + `post_get_health_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionCompositeHealthCheckRequest, @@ -1053,6 +1112,157 @@ def __call__( ) return resp + class _GetHealth( + _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth, + RegionCompositeHealthChecksRestStub, + ): + def __hash__(self): + return hash("RegionCompositeHealthChecksRestTransport.GetHealth") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetHealthRegionCompositeHealthCheckRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] 
= (), + ) -> compute.CompositeHealthCheckHealth: + r"""Call the get health method over HTTP. + + Args: + request (~.compute.GetHealthRegionCompositeHealthCheckRequest): + The request object. A request message for + RegionCompositeHealthChecks.GetHealth. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.CompositeHealthCheckHealth: + Response message for + RegionCompositeHealthChecks.GetHealth + + """ + + http_options = _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth._get_http_options() + + request, metadata = self._interceptor.pre_get_health(request, metadata) + transcoded_request = _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1beta.RegionCompositeHealthChecksClient.GetHealth", + extra={ + "serviceName": "google.cloud.compute.v1beta.RegionCompositeHealthChecks", + "rpcName": 
"GetHealth", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + RegionCompositeHealthChecksRestTransport._GetHealth._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.CompositeHealthCheckHealth() + pb_resp = compute.CompositeHealthCheckHealth.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_health(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_health_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.CompositeHealthCheckHealth.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1beta.RegionCompositeHealthChecksClient.get_health", + extra={ + "serviceName": "google.cloud.compute.v1beta.RegionCompositeHealthChecks", + "rpcName": "GetHealth", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _Insert( _BaseRegionCompositeHealthChecksRestTransport._BaseInsert, RegionCompositeHealthChecksRestStub, @@ -1738,6 +1948,17 @@ def get( # In C++ this would require a dynamic_cast return self._Get(self._session, self._host, self._interceptor) # type: ignore + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionCompositeHealthCheckRequest], + compute.CompositeHealthCheckHealth, + 
]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetHealth(self._session, self._host, self._interceptor) # type: ignore + @property def insert( self, diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/rest_base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/rest_base.py index fcda0bb6524a..e0db2db03c09 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/rest_base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_composite_health_checks/transports/rest_base.py @@ -229,6 +229,52 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseGetHealth: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/beta/projects/{project}/regions/{region}/compositeHealthChecks/{composite_health_check}/getHealth", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetHealthRegionCompositeHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + 
use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth._get_unset_required_fields( + query_params + ) + ) + + return query_params + class _BaseInsert: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/client.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/client.py index d4b240551945..ae876f13a5b6 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/client.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/client.py @@ -5242,6 +5242,339 @@ def error_code(self): # Done; return the response. return response + def update_kms_key_unary( + self, + request: Optional[Union[compute.UpdateKmsKeyRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disk_update_kms_key_request_resource: Optional[ + compute.RegionDiskUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_update_kms_key(): + # Create a client + client = compute_v1beta.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.UpdateKmsKeyRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the Disk resource, should + conform to RFC1035. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disk_update_kms_key_request_resource (google.cloud.compute_v1beta.types.RegionDiskUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``region_disk_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + disk, + region_disk_update_kms_key_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyRegionDiskRequest): + request = compute.UpdateKmsKeyRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disk_update_kms_key_request_resource is not None: + request.region_disk_update_kms_key_request_resource = ( + region_disk_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + ) + ), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_kms_key( + self, + request: Optional[Union[compute.UpdateKmsKeyRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disk_update_kms_key_request_resource: Optional[ + compute.RegionDiskUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_update_kms_key(): + # Create a client + client = compute_v1beta.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.UpdateKmsKeyRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the Disk resource, should + conform to RFC1035. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disk_update_kms_key_request_resource (google.cloud.compute_v1beta.types.RegionDiskUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``region_disk_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + disk, + region_disk_update_kms_key_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyRegionDiskRequest): + request = compute.UpdateKmsKeyRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disk_update_kms_key_request_resource is not None: + request.region_disk_update_kms_key_request_resource = ( + region_disk_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def __enter__(self) -> "RegionDisksClient": return self diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/base.py index 272af67eb5a3..a1446f4a4cb5 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/base.py @@ -262,6 +262,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_kms_key: gapic_v1.method.wrap_method( + self.update_kms_key, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -427,6 +432,15 @@ def update( ]: raise NotImplementedError() + @property + def update_kms_key( + self, + ) -> Callable[ + [compute.UpdateKmsKeyRegionDiskRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/rest.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/rest.py index 774134308b45..276f4f2068dd 100644 --- 
a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/rest.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/rest.py @@ -208,6 +208,14 @@ def post_update(self, response): logging.log(f"Received response: {response}") return response + def pre_update_kms_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_kms_key(self, response): + logging.log(f"Received response: {response}") + return response + transport = RegionDisksRestTransport(interceptor=MyCustomRegionDisksInterceptor()) client = RegionDisksClient(transport=transport) @@ -1010,6 +1018,52 @@ def post_update_with_metadata( """ return response, metadata + def pre_update_kms_key( + self, + request: compute.UpdateKmsKeyRegionDiskRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateKmsKeyRegionDiskRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_kms_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_update_kms_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_kms_key + + DEPRECATED. Please use the `post_update_kms_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. This `post_update_kms_key` interceptor runs + before the `post_update_kms_key_with_metadata` interceptor. 
+ """ + return response + + def post_update_kms_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_kms_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_update_kms_key_with_metadata` + interceptor in new development instead of the `post_update_kms_key` interceptor. + When both interceptors are used, this `post_update_kms_key_with_metadata` interceptor runs after the + `post_update_kms_key` interceptor. The (possibly modified) response returned by + `post_update_kms_key` will be passed to + `post_update_kms_key_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionDisksRestStub: @@ -4180,6 +4234,192 @@ def __call__( ) return resp + class _UpdateKmsKey( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey, RegionDisksRestStub + ): + def __hash__(self): + return hash("RegionDisksRestTransport.UpdateKmsKey") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateKmsKeyRegionDiskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update kms key method over HTTP. 
+ + Args: + request (~.compute.UpdateKmsKeyRegionDiskRequest): + The request object. A request message for + RegionDisks.UpdateKmsKey. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_kms_key(request, metadata) + transcoded_request = ( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_transcoded_request( + http_options, request + ) + ) + + body = ( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1beta.RegionDisksClient.UpdateKmsKey", + extra={ + "serviceName": "google.cloud.compute.v1beta.RegionDisks", + "rpcName": "UpdateKmsKey", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionDisksRestTransport._UpdateKmsKey._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_kms_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_kms_key_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1beta.RegionDisksClient.update_kms_key", + extra={ + "serviceName": "google.cloud.compute.v1beta.RegionDisks", + "rpcName": "UpdateKmsKey", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def add_resource_policies( self, @@ -4312,6 +4552,14 @@ def update(self) -> Callable[[compute.UpdateRegionDiskRequest], compute.Operatio # In C++ this would require a dynamic_cast return self._Update(self._session, self._host, self._interceptor) # type: ignore + @property + def update_kms_key( + self, + ) -> Callable[[compute.UpdateKmsKeyRegionDiskRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateKmsKey(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/rest_base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/rest_base.py index 082eb05377fc..72bf764c3592 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/rest_base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_disks/transports/rest_base.py @@ -989,5 +989,61 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseUpdateKmsKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/beta/projects/{project}/regions/{region}/disks/{disk}/updateKmsKey", + "body": "region_disk_update_kms_key_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateKmsKeyRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_unset_required_fields( + query_params + ) + ) + + return query_params + __all__ = ("_BaseRegionDisksRestTransport",) diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/client.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/client.py index f883fc64b578..923fb2685b45 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/client.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/client.py @@ -1280,6 +1280,146 @@ def sample_get(): # Done; return the response. return response + def get_health( + self, + request: Optional[ + Union[compute.GetHealthRegionHealthSourceRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_source: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.HealthSourceHealth: + r"""Gets the most recent health check results for this + regional HealthSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_get_health(): + # Create a client + client = compute_v1beta.RegionHealthSourcesClient() + + # Initialize request argument(s) + request = compute_v1beta.GetHealthRegionHealthSourceRequest( + health_source="health_source_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.GetHealthRegionHealthSourceRequest, dict]): + The request object. A request message for + RegionHealthSources.GetHealth. See the + method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_source (str): + Name of the HealthSource resource to + get health for. + + This corresponds to the ``health_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.compute_v1beta.types.HealthSourceHealth: + Response message for + RegionHealthSources.GetHealth + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, health_source] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetHealthRegionHealthSourceRequest): + request = compute.GetHealthRegionHealthSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_source is not None: + request.health_source = health_source + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_health] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("health_source", request.health_source), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def insert_unary( self, request: Optional[Union[compute.InsertRegionHealthSourceRequest, dict]] = None, diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/base.py index 1bb813af34ad..70ea4af5e19b 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/base.py @@ -182,6 +182,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_health: gapic_v1.method.wrap_method( + self.get_health, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), self.insert: gapic_v1.method.wrap_method( self.insert, default_timeout=600.0, @@ -253,6 +268,15 @@ def get( ]: raise NotImplementedError() + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionHealthSourceRequest], + Union[compute.HealthSourceHealth, Awaitable[compute.HealthSourceHealth]], + ]: + raise NotImplementedError() + @property def insert( self, diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/rest.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/rest.py index d4c1b986568b..6b52d48aea2a 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/rest.py +++ 
b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/rest.py @@ -96,6 +96,14 @@ def post_get(self, response): logging.log(f"Received response: {response}") return response + def pre_get_health(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_health(self, response): + logging.log(f"Received response: {response}") + return response + def pre_insert(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -277,6 +285,55 @@ def post_get_with_metadata( """ return response, metadata + def pre_get_health( + self, + request: compute.GetHealthRegionHealthSourceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetHealthRegionHealthSourceRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_health + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthSources server. + """ + return request, metadata + + def post_get_health( + self, response: compute.HealthSourceHealth + ) -> compute.HealthSourceHealth: + """Post-rpc interceptor for get_health + + DEPRECATED. Please use the `post_get_health_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionHealthSources server but before + it is returned to user code. This `post_get_health` interceptor runs + before the `post_get_health_with_metadata` interceptor. 
+ """ + return response + + def post_get_health_with_metadata( + self, + response: compute.HealthSourceHealth, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.HealthSourceHealth, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_health + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthSources server but before it is returned to user code. + + We recommend only using this `post_get_health_with_metadata` + interceptor in new development instead of the `post_get_health` interceptor. + When both interceptors are used, this `post_get_health_with_metadata` interceptor runs after the + `post_get_health` interceptor. The (possibly modified) response returned by + `post_get_health` will be passed to + `post_get_health_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionHealthSourceRequest, @@ -1042,6 +1099,155 @@ def __call__( ) return resp + class _GetHealth( + _BaseRegionHealthSourcesRestTransport._BaseGetHealth, + RegionHealthSourcesRestStub, + ): + def __hash__(self): + return hash("RegionHealthSourcesRestTransport.GetHealth") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetHealthRegionHealthSourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.HealthSourceHealth: + r"""Call the get health 
method over HTTP. + + Args: + request (~.compute.GetHealthRegionHealthSourceRequest): + The request object. A request message for + RegionHealthSources.GetHealth. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.HealthSourceHealth: + Response message for + RegionHealthSources.GetHealth + + """ + + http_options = ( + _BaseRegionHealthSourcesRestTransport._BaseGetHealth._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_health(request, metadata) + transcoded_request = _BaseRegionHealthSourcesRestTransport._BaseGetHealth._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionHealthSourcesRestTransport._BaseGetHealth._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1beta.RegionHealthSourcesClient.GetHealth", + extra={ + "serviceName": "google.cloud.compute.v1beta.RegionHealthSources", + "rpcName": "GetHealth", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
RegionHealthSourcesRestTransport._GetHealth._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthSourceHealth() + pb_resp = compute.HealthSourceHealth.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_health(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_health_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.HealthSourceHealth.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1beta.RegionHealthSourcesClient.get_health", + extra={ + "serviceName": "google.cloud.compute.v1beta.RegionHealthSources", + "rpcName": "GetHealth", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _Insert( _BaseRegionHealthSourcesRestTransport._BaseInsert, RegionHealthSourcesRestStub ): @@ -1736,6 +1942,16 @@ def get( # In C++ this would require a dynamic_cast return self._Get(self._session, self._host, self._interceptor) # type: ignore + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionHealthSourceRequest], compute.HealthSourceHealth + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetHealth(self._session, self._host, self._interceptor) # type: ignore + @property def insert( self, diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/rest_base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/rest_base.py index 377b45f8e9da..88056e0790ee 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/rest_base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_health_sources/transports/rest_base.py @@ -225,6 +225,52 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseGetHealth: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/beta/projects/{project}/regions/{region}/healthSources/{health_source}/getHealth", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetHealthRegionHealthSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionHealthSourcesRestTransport._BaseGetHealth._get_unset_required_fields( + query_params + ) + ) + + return 
query_params + class _BaseInsert: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/client.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/client.py index 6a415b6d4fbf..0fa57983bd32 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/client.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/client.py @@ -2410,6 +2410,339 @@ def sample_test_iam_permissions(): # Done; return the response. return response + def update_kms_key_unary( + self, + request: Optional[ + Union[compute.UpdateKmsKeyRegionSnapshotRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot: Optional[str] = None, + region_snapshot_update_kms_key_request_resource: Optional[ + compute.RegionSnapshotUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_update_kms_key(): + # Create a client + client = compute_v1beta.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.UpdateKmsKeyRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.UpdateKmsKey. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + update. Should conform to RFC1035. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_snapshot_update_kms_key_request_resource (google.cloud.compute_v1beta.types.RegionSnapshotUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``region_snapshot_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + snapshot, + region_snapshot_update_kms_key_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyRegionSnapshotRequest): + request = compute.UpdateKmsKeyRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot is not None: + request.snapshot = snapshot + if region_snapshot_update_kms_key_request_resource is not None: + request.region_snapshot_update_kms_key_request_resource = ( + region_snapshot_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_kms_key( + self, + request: Optional[ + Union[compute.UpdateKmsKeyRegionSnapshotRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot: Optional[str] = None, + region_snapshot_update_kms_key_request_resource: Optional[ + compute.RegionSnapshotUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_update_kms_key(): + # Create a client + client = compute_v1beta.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.UpdateKmsKeyRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.UpdateKmsKey. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + update. Should conform to RFC1035. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_snapshot_update_kms_key_request_resource (google.cloud.compute_v1beta.types.RegionSnapshotUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``region_snapshot_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + snapshot, + region_snapshot_update_kms_key_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyRegionSnapshotRequest): + request = compute.UpdateKmsKeyRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot is not None: + request.snapshot = snapshot + if region_snapshot_update_kms_key_request_resource is not None: + request.region_snapshot_update_kms_key_request_resource = ( + region_snapshot_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + def __enter__(self) -> "RegionSnapshotsClient": return self diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/base.py index 71bf876ca8c8..8c5dcf0389f5 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/base.py @@ -217,6 +217,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_kms_key: gapic_v1.method.wrap_method( + self.update_kms_key, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -302,6 +307,15 @@ def test_iam_permissions( ]: raise NotImplementedError() + @property + def update_kms_key( + self, + ) -> Callable[ + [compute.UpdateKmsKeyRegionSnapshotRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/rest.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/rest.py index b18f58591d1c..bbc215c186a1 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/rest.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/rest.py @@ -136,6 +136,14 @@ def post_test_iam_permissions(self, response): logging.log(f"Received response: {response}") return response + def pre_update_kms_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_kms_key(self, 
response): + logging.log(f"Received response: {response}") + return response + transport = RegionSnapshotsRestTransport(interceptor=MyCustomRegionSnapshotsInterceptor()) client = RegionSnapshotsClient(transport=transport) @@ -517,6 +525,53 @@ def post_test_iam_permissions_with_metadata( """ return response, metadata + def pre_update_kms_key( + self, + request: compute.UpdateKmsKeyRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateKmsKeyRegionSnapshotRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_kms_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_update_kms_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_kms_key + + DEPRECATED. Please use the `post_update_kms_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_update_kms_key` interceptor runs + before the `post_update_kms_key_with_metadata` interceptor. + """ + return response + + def post_update_kms_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_kms_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_update_kms_key_with_metadata` + interceptor in new development instead of the `post_update_kms_key` interceptor. + When both interceptors are used, this `post_update_kms_key_with_metadata` interceptor runs after the + `post_update_kms_key` interceptor. 
The (possibly modified) response returned by + `post_update_kms_key` will be passed to + `post_update_kms_key_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionSnapshotsRestStub: @@ -2060,6 +2115,186 @@ def __call__( ) return resp + class _UpdateKmsKey( + _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey, RegionSnapshotsRestStub + ): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.UpdateKmsKey") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateKmsKeyRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update kms key method over HTTP. + + Args: + request (~.compute.UpdateKmsKeyRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.UpdateKmsKey. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. 
+ + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. + + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_kms_key(request, metadata) + transcoded_request = _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1beta.RegionSnapshotsClient.UpdateKmsKey", + extra={ + "serviceName": "google.cloud.compute.v1beta.RegionSnapshots", + "rpcName": "UpdateKmsKey", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + 
response = RegionSnapshotsRestTransport._UpdateKmsKey._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_kms_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_kms_key_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1beta.RegionSnapshotsClient.update_kms_key", + extra={ + "serviceName": "google.cloud.compute.v1beta.RegionSnapshots", + "rpcName": "UpdateKmsKey", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def delete( self, @@ -2125,6 +2360,14 @@ def test_iam_permissions( # In C++ this would require a dynamic_cast return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + @property + def update_kms_key( + self, + ) -> Callable[[compute.UpdateKmsKeyRegionSnapshotRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateKmsKey(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/rest_base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/rest_base.py index 02a07bc9b15d..7d6b86513f31 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/rest_base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/region_snapshots/transports/rest_base.py @@ -495,5 +495,61 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseUpdateKmsKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/beta/projects/{project}/regions/{region}/snapshots/{snapshot}/updateKmsKey", + "body": "region_snapshot_update_kms_key_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateKmsKeyRegionSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params 
= json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_unset_required_fields( + query_params + ) + ) + + return query_params + __all__ = ("_BaseRegionSnapshotsRestTransport",) diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/client.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/client.py index 68fb38f359f5..2eb713171152 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/client.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/client.py @@ -2376,6 +2376,304 @@ def sample_test_iam_permissions(): # Done; return the response. return response + def update_kms_key_unary( + self, + request: Optional[Union[compute.UpdateKmsKeySnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + snapshot: Optional[str] = None, + snapshot_update_kms_key_request_resource: Optional[ + compute.SnapshotUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_update_kms_key(): + # Create a client + client = compute_v1beta.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeySnapshotRequest( + project="project_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.UpdateKmsKeySnapshotRequest, dict]): + The request object. A request message for + Snapshots.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + update. Should conform to RFC1035. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_update_kms_key_request_resource (google.cloud.compute_v1beta.types.SnapshotUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``snapshot_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, snapshot, snapshot_update_kms_key_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeySnapshotRequest): + request = compute.UpdateKmsKeySnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if snapshot is not None: + request.snapshot = snapshot + if snapshot_update_kms_key_request_resource is not None: + request.snapshot_update_kms_key_request_resource = ( + snapshot_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_kms_key( + self, + request: Optional[Union[compute.UpdateKmsKeySnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + snapshot: Optional[str] = None, + snapshot_update_kms_key_request_resource: Optional[ + compute.SnapshotUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1beta + + def sample_update_kms_key(): + # Create a client + client = compute_v1beta.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeySnapshotRequest( + project="project_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1beta.types.UpdateKmsKeySnapshotRequest, dict]): + The request object. A request message for + Snapshots.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + update. Should conform to RFC1035. 
+ + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_update_kms_key_request_resource (google.cloud.compute_v1beta.types.SnapshotUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``snapshot_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, snapshot, snapshot_update_kms_key_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeySnapshotRequest): + request = compute.UpdateKmsKeySnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if snapshot is not None: + request.snapshot = snapshot + if snapshot_update_kms_key_request_resource is not None: + request.snapshot_update_kms_key_request_resource = ( + snapshot_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + def __enter__(self) -> "SnapshotsClient": return self diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/base.py index 4f8a07dc244c..70728911065b 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/base.py @@ -232,6 +232,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_kms_key: gapic_v1.method.wrap_method( + self.update_kms_key, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -328,6 +333,15 @@ def test_iam_permissions( ]: raise NotImplementedError() + @property + def update_kms_key( + self, + ) -> Callable[ + [compute.UpdateKmsKeySnapshotRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/rest.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/rest.py index fd9590b1d782..eb19690b8b6c 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/rest.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/rest.py @@ -144,6 +144,14 @@ def post_test_iam_permissions(self, response): logging.log(f"Received response: {response}") return response + def pre_update_kms_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_kms_key(self, response): + logging.log(f"Received response: {response}") + return 
response + transport = SnapshotsRestTransport(interceptor=MyCustomSnapshotsInterceptor()) client = SnapshotsClient(transport=transport) @@ -563,6 +571,52 @@ def post_test_iam_permissions_with_metadata( """ return response, metadata + def pre_update_kms_key( + self, + request: compute.UpdateKmsKeySnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateKmsKeySnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_kms_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_update_kms_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_kms_key + + DEPRECATED. Please use the `post_update_kms_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. This `post_update_kms_key` interceptor runs + before the `post_update_kms_key_with_metadata` interceptor. + """ + return response + + def post_update_kms_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_kms_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_update_kms_key_with_metadata` + interceptor in new development instead of the `post_update_kms_key` interceptor. + When both interceptors are used, this `post_update_kms_key_with_metadata` interceptor runs after the + `post_update_kms_key` interceptor. The (possibly modified) response returned by + `post_update_kms_key` will be passed to + `post_update_kms_key_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class SnapshotsRestStub: @@ -2245,6 +2299,190 @@ def __call__( ) return resp + class _UpdateKmsKey( + _BaseSnapshotsRestTransport._BaseUpdateKmsKey, SnapshotsRestStub + ): + def __hash__(self): + return hash("SnapshotsRestTransport.UpdateKmsKey") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateKmsKeySnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update kms key method over HTTP. + + Args: + request (~.compute.UpdateKmsKeySnapshotRequest): + The request object. A request message for + Snapshots.UpdateKmsKey. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. 
For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. + + """ + + http_options = ( + _BaseSnapshotsRestTransport._BaseUpdateKmsKey._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_kms_key(request, metadata) + transcoded_request = ( + _BaseSnapshotsRestTransport._BaseUpdateKmsKey._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseSnapshotsRestTransport._BaseUpdateKmsKey._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseSnapshotsRestTransport._BaseUpdateKmsKey._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1beta.SnapshotsClient.UpdateKmsKey", + extra={ + "serviceName": "google.cloud.compute.v1beta.Snapshots", + "rpcName": "UpdateKmsKey", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = SnapshotsRestTransport._UpdateKmsKey._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate 
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_kms_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_kms_key_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1beta.SnapshotsClient.update_kms_key", + extra={ + "serviceName": "google.cloud.compute.v1beta.Snapshots", + "rpcName": "UpdateKmsKey", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def aggregated_list( self, @@ -2313,6 +2551,14 @@ def test_iam_permissions( # In C++ this would require a dynamic_cast return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + @property + def update_kms_key( + self, + ) -> Callable[[compute.UpdateKmsKeySnapshotRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateKmsKey(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/rest_base.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/rest_base.py index ba62996b8353..7e0739d45a98 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/rest_base.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/services/snapshots/transports/rest_base.py @@ -541,5 +541,61 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseUpdateKmsKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/beta/projects/{project}/global/snapshots/{snapshot}/updateKmsKey", + "body": "snapshot_update_kms_key_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateKmsKeySnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseSnapshotsRestTransport._BaseUpdateKmsKey._get_unset_required_fields( + query_params + ) + ) + + return query_params + __all__ = ("_BaseSnapshotsRestTransport",) diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/__init__.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/__init__.py index 2d686778593a..28de75c42d65 100644 --- a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/__init__.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/__init__.py @@ -237,11 +237,14 @@ Commitment, CommitmentAggregatedList, CommitmentList, + CommitmentParams, CommitmentResourceStatus, CommitmentsScopedList, CompositeHealthCheck, CompositeHealthCheckAggregatedList, + CompositeHealthCheckHealth, CompositeHealthCheckList, + CompositeHealthChecksGetHealthResponseHealthSourceHealth, CompositeHealthChecksScopedList, ConfidentialInstanceConfig, ConfigureAcceleratorTopologiesInstanceGroupManagerRequest, @@ -410,6 +413,7 @@ DiskTypeAggregatedList, DiskTypeList, DiskTypesScopedList, + DiskUpdateKmsKeyRequest, DisplayDevice, DistributionPolicy, DistributionPolicyZoneConfiguration, @@ -452,6 +456,7 @@ ForwardingRulesScopedList, FutureReservation, FutureReservationCommitmentInfo, + FutureReservationParams, FutureReservationsAggregatedListResponse, FutureReservationsListResponse, FutureReservationSpecificSKUProperties, @@ -508,6 +513,8 @@ GetHealthBackendServiceRequest, GetHealthCheckRequest, GetHealthRegionBackendServiceRequest, + GetHealthRegionCompositeHealthCheckRequest, + GetHealthRegionHealthSourceRequest, GetHealthTargetPoolRequest, GetIamPolicyBackendBucketRequest, GetIamPolicyBackendServiceRequest, @@ -715,7 +722,10 @@ HealthChecksScopedList, HealthSource, HealthSourceAggregatedList, + HealthSourceHealth, HealthSourceList, + HealthSourcesGetHealthResponseSourceInfo, + 
HealthSourcesGetHealthResponseSourceInfoBackendInfo, HealthSourcesScopedList, HealthStatus, HealthStatusForNetworkEndpoint, @@ -868,6 +878,7 @@ InstanceGroupManagersAbandonInstancesRequest, InstanceGroupManagersApplyUpdatesRequest, InstanceGroupManagersConfigureAcceleratorTopologiesRequest, + InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration, InstanceGroupManagersCreateInstancesRequest, InstanceGroupManagersDeleteInstancesRequest, InstanceGroupManagersDeletePerInstanceConfigsReq, @@ -1463,6 +1474,7 @@ RegionDisksResizeRequest, RegionDisksStartAsyncReplicationRequest, RegionDiskTypeList, + RegionDiskUpdateKmsKeyRequest, RegionInstanceGroupList, RegionInstanceGroupManagerDeleteInstanceConfigReq, RegionInstanceGroupManagerList, @@ -1496,6 +1508,7 @@ RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, RegionSetLabelsRequest, RegionSetPolicyRequest, + RegionSnapshotUpdateKmsKeyRequest, RegionTargetHttpsProxiesSetSslCertificatesRequest, RegionUrlMapsValidateRequest, RemoveAssociationFirewallPolicyRequest, @@ -1609,6 +1622,7 @@ RolloutsListResponse, RolloutWaveDetails, RolloutWaveDetailsOrchestratedWaveDetails, + RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus, Route, RouteAsPath, RouteList, @@ -1836,6 +1850,7 @@ SnapshotSettingsStorageLocationSettings, SnapshotSettingsStorageLocationSettingsStorageLocationPreference, SnapshotsScopedList, + SnapshotUpdateKmsKeyRequest, SourceDiskEncryptionKey, SourceInstanceParams, SourceInstanceProperties, @@ -2046,6 +2061,10 @@ UpdateHealthCheckRequest, UpdateInstanceGroupManagerRequest, UpdateInstanceRequest, + UpdateKmsKeyDiskRequest, + UpdateKmsKeyRegionDiskRequest, + UpdateKmsKeyRegionSnapshotRequest, + UpdateKmsKeySnapshotRequest, UpdateLicenseRequest, UpdateNamedSetRouterRequest, UpdateNetworkInterfaceInstanceRequest, @@ -2072,9 +2091,11 @@ UpdateZoneVmExtensionPolicyRequest, UrlMap, UrlMapList, + UrlMapQuotaUsage, UrlMapReference, 
UrlMapsAggregatedList, UrlMapsScopedList, + UrlMapStatus, UrlMapsValidateRequest, UrlMapsValidateResponse, UrlMapTest, @@ -2371,11 +2392,14 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentParams", "CommitmentResourceStatus", "CommitmentsScopedList", "CompositeHealthCheck", "CompositeHealthCheckAggregatedList", + "CompositeHealthCheckHealth", "CompositeHealthCheckList", + "CompositeHealthChecksGetHealthResponseHealthSourceHealth", "CompositeHealthChecksScopedList", "ConfidentialInstanceConfig", "ConfigureAcceleratorTopologiesInstanceGroupManagerRequest", @@ -2544,6 +2568,7 @@ "DiskTypeAggregatedList", "DiskTypeList", "DiskTypesScopedList", + "DiskUpdateKmsKeyRequest", "DisplayDevice", "DistributionPolicy", "DistributionPolicyZoneConfiguration", @@ -2586,6 +2611,7 @@ "ForwardingRulesScopedList", "FutureReservation", "FutureReservationCommitmentInfo", + "FutureReservationParams", "FutureReservationsAggregatedListResponse", "FutureReservationsListResponse", "FutureReservationSpecificSKUProperties", @@ -2642,6 +2668,8 @@ "GetHealthBackendServiceRequest", "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", + "GetHealthRegionCompositeHealthCheckRequest", + "GetHealthRegionHealthSourceRequest", "GetHealthTargetPoolRequest", "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", @@ -2849,7 +2877,10 @@ "HealthChecksScopedList", "HealthSource", "HealthSourceAggregatedList", + "HealthSourceHealth", "HealthSourceList", + "HealthSourcesGetHealthResponseSourceInfo", + "HealthSourcesGetHealthResponseSourceInfoBackendInfo", "HealthSourcesScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", @@ -3002,6 +3033,7 @@ "InstanceGroupManagersAbandonInstancesRequest", "InstanceGroupManagersApplyUpdatesRequest", "InstanceGroupManagersConfigureAcceleratorTopologiesRequest", + "InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration", "InstanceGroupManagersCreateInstancesRequest", 
"InstanceGroupManagersDeleteInstancesRequest", "InstanceGroupManagersDeletePerInstanceConfigsReq", @@ -3597,6 +3629,7 @@ "RegionDisksResizeRequest", "RegionDisksStartAsyncReplicationRequest", "RegionDiskTypeList", + "RegionDiskUpdateKmsKeyRequest", "RegionInstanceGroupList", "RegionInstanceGroupManagerDeleteInstanceConfigReq", "RegionInstanceGroupManagerList", @@ -3630,6 +3663,7 @@ "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "RegionSetLabelsRequest", "RegionSetPolicyRequest", + "RegionSnapshotUpdateKmsKeyRequest", "RegionTargetHttpsProxiesSetSslCertificatesRequest", "RegionUrlMapsValidateRequest", "RemoveAssociationFirewallPolicyRequest", @@ -3743,6 +3777,7 @@ "RolloutsListResponse", "RolloutWaveDetails", "RolloutWaveDetailsOrchestratedWaveDetails", + "RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus", "Route", "RouteAsPath", "RouteList", @@ -3970,6 +4005,7 @@ "SnapshotSettingsStorageLocationSettings", "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", "SnapshotsScopedList", + "SnapshotUpdateKmsKeyRequest", "SourceDiskEncryptionKey", "SourceInstanceParams", "SourceInstanceProperties", @@ -4180,6 +4216,10 @@ "UpdateHealthCheckRequest", "UpdateInstanceGroupManagerRequest", "UpdateInstanceRequest", + "UpdateKmsKeyDiskRequest", + "UpdateKmsKeyRegionDiskRequest", + "UpdateKmsKeyRegionSnapshotRequest", + "UpdateKmsKeySnapshotRequest", "UpdateLicenseRequest", "UpdateNamedSetRouterRequest", "UpdateNetworkInterfaceInstanceRequest", @@ -4206,9 +4246,11 @@ "UpdateZoneVmExtensionPolicyRequest", "UrlMap", "UrlMapList", + "UrlMapQuotaUsage", "UrlMapReference", "UrlMapsAggregatedList", "UrlMapsScopedList", + "UrlMapStatus", "UrlMapsValidateRequest", "UrlMapsValidateResponse", "UrlMapTest", diff --git a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/compute.py b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/compute.py index cab92f169e7a..2fb5627cac49 100644 --- 
a/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/compute.py +++ b/packages/google-cloud-compute-v1beta/google/cloud/compute_v1beta/types/compute.py @@ -247,11 +247,14 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentParams", "CommitmentResourceStatus", "CommitmentsScopedList", "CompositeHealthCheck", "CompositeHealthCheckAggregatedList", + "CompositeHealthCheckHealth", "CompositeHealthCheckList", + "CompositeHealthChecksGetHealthResponseHealthSourceHealth", "CompositeHealthChecksScopedList", "ConfidentialInstanceConfig", "ConfigureAcceleratorTopologiesInstanceGroupManagerRequest", @@ -414,6 +417,7 @@ "DiskTypeAggregatedList", "DiskTypeList", "DiskTypesScopedList", + "DiskUpdateKmsKeyRequest", "DisksAddResourcePoliciesRequest", "DisksRemoveResourcePoliciesRequest", "DisksResizeRequest", @@ -462,6 +466,7 @@ "ForwardingRulesScopedList", "FutureReservation", "FutureReservationCommitmentInfo", + "FutureReservationParams", "FutureReservationSpecificSKUProperties", "FutureReservationStatus", "FutureReservationStatusExistingMatchingUsageInfo", @@ -520,6 +525,8 @@ "GetHealthBackendServiceRequest", "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", + "GetHealthRegionCompositeHealthCheckRequest", + "GetHealthRegionHealthSourceRequest", "GetHealthTargetPoolRequest", "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", @@ -728,7 +735,10 @@ "HealthChecksScopedList", "HealthSource", "HealthSourceAggregatedList", + "HealthSourceHealth", "HealthSourceList", + "HealthSourcesGetHealthResponseSourceInfo", + "HealthSourcesGetHealthResponseSourceInfoBackendInfo", "HealthSourcesScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", @@ -892,6 +902,7 @@ "InstanceGroupManagersAbandonInstancesRequest", "InstanceGroupManagersApplyUpdatesRequest", "InstanceGroupManagersConfigureAcceleratorTopologiesRequest", + "InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration", 
"InstanceGroupManagersCreateInstancesRequest", "InstanceGroupManagersDeleteInstancesRequest", "InstanceGroupManagersDeletePerInstanceConfigsReq", @@ -1469,6 +1480,7 @@ "RegionAutoscalerList", "RegionCommitmentsUpdateReservationsRequest", "RegionDiskTypeList", + "RegionDiskUpdateKmsKeyRequest", "RegionDisksAddResourcePoliciesRequest", "RegionDisksRemoveResourcePoliciesRequest", "RegionDisksResizeRequest", @@ -1506,6 +1518,7 @@ "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "RegionSetLabelsRequest", "RegionSetPolicyRequest", + "RegionSnapshotUpdateKmsKeyRequest", "RegionTargetHttpsProxiesSetSslCertificatesRequest", "RegionUrlMapsValidateRequest", "RemoveAssociationFirewallPolicyRequest", @@ -1618,6 +1631,7 @@ "RolloutRolloutEntityOrchestratedEntity", "RolloutWaveDetails", "RolloutWaveDetailsOrchestratedWaveDetails", + "RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus", "RolloutsListResponse", "Route", "RouteAsPath", @@ -1846,6 +1860,7 @@ "SnapshotSettingsAccessLocationAccessLocationPreference", "SnapshotSettingsStorageLocationSettings", "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", + "SnapshotUpdateKmsKeyRequest", "SnapshotsScopedList", "SourceDiskEncryptionKey", "SourceInstanceParams", @@ -2056,6 +2071,10 @@ "UpdateHealthCheckRequest", "UpdateInstanceGroupManagerRequest", "UpdateInstanceRequest", + "UpdateKmsKeyDiskRequest", + "UpdateKmsKeyRegionDiskRequest", + "UpdateKmsKeyRegionSnapshotRequest", + "UpdateKmsKeySnapshotRequest", "UpdateLicenseRequest", "UpdateNamedSetRouterRequest", "UpdateNetworkInterfaceInstanceRequest", @@ -2082,7 +2101,9 @@ "UpdateZoneVmExtensionPolicyRequest", "UrlMap", "UrlMapList", + "UrlMapQuotaUsage", "UrlMapReference", + "UrlMapStatus", "UrlMapTest", "UrlMapTestHeader", "UrlMapValidationResult", @@ -19506,6 +19527,16 @@ class Backend(proto.Message): possible values. This field is a member of `oneof`_ ``_preference``. 
+ service (str): + Represents a service backend (e.g., Cloud Run + service, PSC Service Attachment). + e.g. + "run.googleapis.com/projects/123456789/locations/us-central1/services/my-service" + for Cloud Run service. + "compute.googleapis.com/projects/123456789/regions/us-central1/serviceAttachments/my-service-attachment" + for PSC Service Attachment. + + This field is a member of `oneof`_ ``_service``. traffic_duration (str): Check the TrafficDuration enum for the list of possible values. @@ -19697,6 +19728,11 @@ class TrafficDuration(proto.Enum): number=150781147, optional=True, ) + service: str = proto.Field( + proto.STRING, + number=373540533, + optional=True, + ) traffic_duration: str = proto.Field( proto.STRING, number=11618710, @@ -20759,7 +20795,7 @@ class BackendService(proto.Message): :: - - A regional backend service with the service_protocol set to HTTP, + - A regional backend service with the service protocol set to HTTP, HTTPS, HTTP2 or H2C, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the @@ -21031,7 +21067,7 @@ class BackendService(proto.Message): metrics to use for computing the weights are specified via thecustomMetrics field. This field is applicable to either: - - A regional backend service with the service_protocol set to HTTP, + - A regional backend service with the service protocol set to HTTP, HTTPS, HTTP2 or H2C, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the @@ -21162,7 +21198,7 @@ class BackendService(proto.Message): - A global backend service with the loadBalancingScheme set to INTERNAL_SELF_MANAGED or EXTERNAL_MANAGED. - A regional backend - service with the serviceProtocol set to HTTP, HTTPS, HTTP2 or H2C, and + service with the service protocol set to HTTP, HTTPS, HTTP2 or H2C, and loadBalancingScheme set to INTERNAL_MANAGED or EXTERNAL_MANAGED. Not supported for Serverless NEGs. 
@@ -21504,7 +21540,7 @@ class LocalityLbPolicy(proto.Enum): metrics to use for computing the weights are specified via thecustomMetrics field. This field is applicable to either: - - A regional backend service with the service_protocol set to HTTP, + - A regional backend service with the service protocol set to HTTP, HTTPS, HTTP2 or H2C, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the @@ -24987,7 +25023,8 @@ class BulkInsertDiskResource(proto.Message): This field is a member of `oneof`_ ``_instant_snapshot_group_parameters``. snapshot_group_parameters (google.cloud.compute_v1beta.types.SnapshotGroupParameters): - The parameters for the snapshot group. + The parameters for the snapshot group. The + usage of snapshot group feature is restricted. This field is a member of `oneof`_ ``_snapshot_group_parameters``. source_consistency_group_policy (str): @@ -25726,64 +25763,74 @@ class CachePolicy(proto.Message): Attributes: cache_bypass_request_header_names (MutableSequence[str]): - Bypass the cache when the specified request - headers are matched by name, e.g. Pragma or - Authorization headers. Values are - case-insensitive. Up to 5 header names can be - specified. The cache is bypassed for all - cachePolicy.cacheMode settings. + Bypass the cache when the specified request headers are + matched by name, e.g. Pragma or Authorization headers. + Values are case-insensitive. Up to 5 header names can be + specified. The cache is bypassed for all ``cacheMode`` + values. cache_key_policy (google.cloud.compute_v1beta.types.CachePolicyCacheKeyPolicy): - The CacheKeyPolicy for this CachePolicy. + The cache key configuration. If not + specified, the default behavior depends on the + backend type: for Backend Services, the complete + request URI is used; for Backend Buckets, the + request URI is used without the protocol or + host, and only query parameters known to Cloud + Storage are included. 
This field is a member of `oneof`_ ``_cache_key_policy``. cache_mode (str): Specifies the cache setting for all responses from this - route. If not specified, the default value is - CACHE_ALL_STATIC. Check the CacheMode enum for the list of - possible values. + route. If not specified, Cloud CDN uses ``CACHE_ALL_STATIC`` + mode. Check the CacheMode enum for the list of possible + values. This field is a member of `oneof`_ ``_cache_mode``. client_ttl (google.cloud.compute_v1beta.types.Duration): Specifies a separate client (e.g. browser client) maximum - TTL. This is used to clamp the max-age (or Expires) value - sent to the client. With FORCE_CACHE_ALL, the lesser of - client_ttl and default_ttl is used for the response max-age - directive, along with a "public" directive. For cacheable - content in CACHE_ALL_STATIC mode, client_ttl clamps the - max-age from the origin (if specified), or else sets the - response max-age directive to the lesser of the client_ttl - and default_ttl, and also ensures a "public" cache-control - directive is present. If a client TTL is not specified, a - default value (1 hour) will be used. The maximum allowed - value is 31,622,400s (1 year). + TTL for cached content. This is used to clamp the max-age + (or Expires) value sent to the client. With + ``FORCE_CACHE_ALL``, the lesser of ``clientTtl`` and + ``defaultTtl`` is used for the response max-age directive, + along with a "public" directive. For cacheable content in + ``CACHE_ALL_STATIC`` mode, ``clientTtl`` clamps the max-age + from the origin (if specified), or else sets the response + max-age directive to the lesser of the ``clientTtl`` and + ``defaultTtl``, and also ensures a "public" cache-control + directive is present. The maximum allowed value is + 31,622,400s (1 year). If not specified, Cloud CDN uses 3600s + (1 hour) for ``CACHE_ALL_STATIC`` mode. Cannot exceed + ``maxTtl``. Cannot be specified when ``cacheMode`` is + ``USE_ORIGIN_HEADERS``. 
This field is a member of `oneof`_ ``_client_ttl``. default_ttl (google.cloud.compute_v1beta.types.Duration): - Specifies the default TTL for cached content served by this - origin for responses that do not have an existing valid TTL - (max-age or s-maxage). Setting a TTL of "0" means "always - revalidate". The value of defaultTTL cannot be set to a - value greater than that of maxTTL. When the cacheMode is set - to FORCE_CACHE_ALL, the defaultTTL will overwrite the TTL - set in all responses. The maximum allowed value is + Specifies the default TTL for cached content for responses + that do not have an existing valid TTL (max-age or + s-maxage). Setting a TTL of "0" means "always revalidate". + The value of ``defaultTtl`` cannot be set to a value greater + than that of ``maxTtl``. When the ``cacheMode`` is set to + ``FORCE_CACHE_ALL``, the ``defaultTtl`` will overwrite the + TTL set in all responses. The maximum allowed value is 31,622,400s (1 year). Infrequently accessed objects may be - evicted from the cache before the defined TTL. + evicted from the cache before the defined TTL. If not + specified, Cloud CDN uses 3600s (1 hour) for + ``CACHE_ALL_STATIC`` and ``FORCE_CACHE_ALL`` modes. Cannot + be specified when ``cacheMode`` is ``USE_ORIGIN_HEADERS``. This field is a member of `oneof`_ ``_default_ttl``. max_ttl (google.cloud.compute_v1beta.types.Duration): - Specifies the maximum allowed TTL for cached - content served by this origin. - Cache directives that attempt to set a max-age - or s-maxage higher than this, or an Expires - header more than maxTTL seconds in the future - will be capped at the value of maxTTL, as if it - were the value of an s-maxage Cache-Control - directive. - Headers sent to the client will not be modified. - Setting a TTL of "0" means "always revalidate". - The maximum allowed value is 31,622,400s (1 - year). Infrequently accessed objects may be - evicted from the cache before the defined TTL. 
+ Specifies the maximum allowed TTL for cached content. Cache + directives that attempt to set a max-age or s-maxage higher + than this, or an Expires header more than ``maxTtl`` seconds + in the future will be capped at the value of ``maxTtl``, as + if it were the value of an s-maxage Cache-Control directive. + Headers sent to the client will not be modified. Setting a + TTL of "0" means "always revalidate". The maximum allowed + value is 31,622,400s (1 year). Infrequently accessed objects + may be evicted from the cache before the defined TTL. If not + specified, Cloud CDN uses 86400s (1 day) for + ``CACHE_ALL_STATIC`` mode. Can be specified only for + ``CACHE_ALL_STATIC`` cache mode. This field is a member of `oneof`_ ``_max_ttl``. negative_caching (bool): @@ -25791,35 +25838,41 @@ class CachePolicy(proto.Message): order to apply fine-grained caching for common errors or redirects. This can reduce the load on your origin and improve end-user experience by reducing response latency. - When the cache mode is set to CACHE_ALL_STATIC or - USE_ORIGIN_HEADERS, negative caching applies to responses - with the specified response code that lack any + When the ``cacheMode`` is set to ``CACHE_ALL_STATIC`` or + ``USE_ORIGIN_HEADERS``, negative caching applies to + responses with the specified response code that lack any Cache-Control, Expires, or Pragma: no-cache directives. When - the cache mode is set to FORCE_CACHE_ALL, negative caching - applies to all responses with the specified response code, - and override any caching headers. By default, Cloud CDN will - apply the following default TTLs to these status codes: HTTP - 300 (Multiple Choice), 301, 308 (Permanent Redirects): 10m - HTTP 404 (Not Found), 410 (Gone), 451 (Unavailable For Legal - Reasons): 120s HTTP 405 (Method Not Found), 501 (Not - Implemented): 60s. These defaults can be overridden in - negative_caching_policy. 
+ the ``cacheMode`` is set to ``FORCE_CACHE_ALL``, negative + caching applies to all responses with the specified response + code, and overrides any caching headers. By default, Cloud + CDN applies the following TTLs to these HTTP status codes: + + - 300 (Multiple Choice), 301, 308 (Permanent Redirects): 10m + - 404 (Not Found), 410 (Gone), 451 (Unavailable For Legal + Reasons): 120s + - 405 (Method Not Found), 501 (Not Implemented): 60s + + These defaults can be overridden in + ``negativeCachingPolicy``. If not specified, Cloud CDN + applies negative caching by default. This field is a member of `oneof`_ ``_negative_caching``. negative_caching_policy (MutableSequence[google.cloud.compute_v1beta.types.CachePolicyNegativeCachingPolicy]): Sets a cache TTL for the specified HTTP status code. - negative_caching must be enabled to configure - negative_caching_policy. Omitting the policy and leaving - negative_caching enabled will use Cloud CDN's default cache - TTLs. Note that when specifying an explicit - negative_caching_policy, you should take care to specify a + ``negativeCaching`` must be enabled to configure + ``negativeCachingPolicy``. Omitting the policy and leaving + ``negativeCaching`` enabled will use Cloud CDN's default + cache TTLs. Note that when specifying an explicit + ``negativeCachingPolicy``, you should take care to specify a cache TTL for all response codes that you wish to cache. Cloud CDN will not apply any default negative caching when a policy exists. request_coalescing (bool): If true then Cloud CDN will combine multiple concurrent cache fill requests into a small - number of requests to the origin. + number of requests to the origin. If not + specified, Cloud CDN applies request coalescing + by default. This field is a member of `oneof`_ ``_request_coalescing``. 
serve_while_stale (google.cloud.compute_v1beta.types.Duration): @@ -25844,7 +25897,7 @@ class CachePolicy(proto.Message): class CacheMode(proto.Enum): r"""Specifies the cache setting for all responses from this route. If - not specified, the default value is CACHE_ALL_STATIC. + not specified, Cloud CDN uses ``CACHE_ALL_STATIC`` mode. Values: UNDEFINED_CACHE_MODE (0): @@ -25946,16 +25999,16 @@ class CachePolicyCacheKeyPolicy(proto.Message): excluded_query_parameters (MutableSequence[str]): Names of query string parameters to exclude in cache keys. All other parameters will be included. Either specify - excluded_query_parameters or included_query_parameters, not - both. '&' and '=' will be percent encoded and not treated as - delimiters. + ``excludedQueryParameters`` or ``includedQueryParameters``, + not both. '&' and '=' will be percent encoded and not + treated as delimiters. Note: This field applies to routes that use backend services. Attempting to set it on a route that points exclusively to Backend Buckets will result in a configuration error. For routes that point to a Backend - Bucket, use includedQueryParameters to define which - parameters should be a part of the cache key. + Bucket, use ``includedQueryParameters`` to define which + parameters should be part of the cache key. include_host (bool): If true, requests to different hosts will be cached separately. @@ -25982,8 +26035,8 @@ class CachePolicyCacheKeyPolicy(proto.Message): This field is a member of `oneof`_ ``_include_protocol``. include_query_string (bool): If true, include query string parameters in the cache key - according to included_query_parameters and - excluded_query_parameters. If neither is set, the entire + according to ``includedQueryParameters`` and + ``excludedQueryParameters``. If neither is set, the entire query string will be included. If false, the query string will be excluded from the cache key entirely. 
@@ -25991,8 +26044,8 @@ class CachePolicyCacheKeyPolicy(proto.Message): services. Attempting to set it on a route that points exclusively to Backend Buckets will result in a configuration error. For routes that point to a Backend - Bucket, use includedQueryParameters to define which - parameters should be a part of the cache key. + Bucket, use ``includedQueryParameters`` to define which + parameters should be part of the cache key. This field is a member of `oneof`_ ``_include_query_string``. included_cookie_names (MutableSequence[str]): @@ -26012,9 +26065,9 @@ class CachePolicyCacheKeyPolicy(proto.Message): included_query_parameters (MutableSequence[str]): Names of query string parameters to include in cache keys. All other parameters will be excluded. Either specify - included_query_parameters or excluded_query_parameters, not - both. '&' and '=' will be percent encoded and not treated as - delimiters. + ``includedQueryParameters`` or ``excludedQueryParameters``, + not both. '&' and '=' will be percent encoded and not + treated as delimiters. """ excluded_query_parameters: MutableSequence[str] = proto.RepeatedField( @@ -26885,6 +26938,12 @@ class Commitment(proto.Message): except the last character, which cannot be a dash. This field is a member of `oneof`_ ``_name``. + params (google.cloud.compute_v1beta.types.CommitmentParams): + Input only. Additional params passed with the + request, but not persisted as part of resource + payload. + + This field is a member of `oneof`_ ``_params``. plan (str): The minimum time duration that you commit to purchasing resources. 
The plan that you choose determines the preset @@ -27278,6 +27337,12 @@ class Type(proto.Enum): number=3373707, optional=True, ) + params: "CommitmentParams" = proto.Field( + proto.MESSAGE, + number=78313862, + optional=True, + message="CommitmentParams", + ) plan: str = proto.Field( proto.STRING, number=3443497, @@ -27494,6 +27559,28 @@ def raw_page(self): ) +class CommitmentParams(proto.Message): + r"""Additional commitment params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Input only. Resource manager tags to be bound to the + commitment. Tag keys and values have the same definition as + resource manager tags. Keys and values can be either in + numeric format, such as ``tagKeys/{tag_key_id}`` and + ``tagValues/{tag_value_id}`` or in namespaced format such as + ``{org_id|project_id}/{tag_key_short_name}`` and + ``{tag_value_short_name}``. The field is ignored (both PUT & + PATCH) when empty. + """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=377671164, + ) + + class CommitmentResourceStatus(proto.Message): r"""[Output Only] Contains output only fields. @@ -27777,6 +27864,68 @@ def raw_page(self): ) +class CompositeHealthCheckHealth(proto.Message): + r"""Response message for RegionCompositeHealthChecks.GetHealth + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_sources (MutableSequence[google.cloud.compute_v1beta.types.CompositeHealthChecksGetHealthResponseHealthSourceHealth]): + Health sources and their corresponding health + states. + health_state (str): + Health state of the CompositeHealthCheck. + Check the HealthState enum for the list of + possible values. + + This field is a member of `oneof`_ ``_health_state``. + kind (str): + Output only. [Output Only] Type of resource. + Alwayscompute#compositeHealthCheckHealth for the health of + composite health checks. 
+ + This field is a member of `oneof`_ ``_kind``. + """ + + class HealthState(proto.Enum): + r"""Health state of the CompositeHealthCheck. + + Values: + UNDEFINED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + HEALTHY (439801213): + No description available. + UNHEALTHY (462118084): + No description available. + UNKNOWN (433141802): + No description available. + """ + + UNDEFINED_HEALTH_STATE = 0 + HEALTHY = 439801213 + UNHEALTHY = 462118084 + UNKNOWN = 433141802 + + health_sources: MutableSequence[ + "CompositeHealthChecksGetHealthResponseHealthSourceHealth" + ] = proto.RepeatedField( + proto.MESSAGE, + number=397879509, + message="CompositeHealthChecksGetHealthResponseHealthSourceHealth", + ) + health_state: str = proto.Field( + proto.STRING, + number=324007150, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + + class CompositeHealthCheckList(proto.Message): r""" @@ -27852,6 +28001,57 @@ def raw_page(self): ) +class CompositeHealthChecksGetHealthResponseHealthSourceHealth(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_state (str): + Health state of the associated HealthSource + resource. Check the HealthState enum for the + list of possible values. + + This field is a member of `oneof`_ ``_health_state``. + source (str): + Fully qualified URL of the associated + HealthSource resource. + + This field is a member of `oneof`_ ``_source``. + """ + + class HealthState(proto.Enum): + r"""Health state of the associated HealthSource resource. + + Values: + UNDEFINED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + HEALTHY (439801213): + No description available. + UNHEALTHY (462118084): + No description available. + UNKNOWN (433141802): + No description available. 
+ """ + + UNDEFINED_HEALTH_STATE = 0 + HEALTHY = 439801213 + UNHEALTHY = 462118084 + UNKNOWN = 433141802 + + health_state: str = proto.Field( + proto.STRING, + number=324007150, + optional=True, + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + + class CompositeHealthChecksScopedList(proto.Message): r""" @@ -37698,6 +37898,35 @@ class DiskTypesScopedList(proto.Message): ) +class DiskUpdateKmsKeyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + Optional. The new KMS key to replace the current one on the + disk. If empty, the disk will be re-encrypted using the + primary version of the disk's current KMS key. + + The KMS key can be provided in the following formats: + + :: + + - projects/project_id/locations/location/keyRings/key_ring/cryptoKeys/key + + Where project is the project ID or project number. + + This field is a member of `oneof`_ ``_kms_key_name``. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + + class DisksAddResourcePoliciesRequest(proto.Message): r""" @@ -38116,7 +38345,7 @@ class EnableXpnResourceProjectRequest(proto.Message): class Error(proto.Message): - r"""Reason why the topology state change failed + r"""Output only. Encountered errors. Attributes: errors (MutableSequence[google.cloud.compute_v1beta.types.Errors]): @@ -42038,6 +42267,11 @@ class FutureReservation(proto.Message): new commitment or update an existing commitment. This field is a member of `oneof`_ ``_commitment_info``. + confidential_compute_type (str): + Check the ConfidentialComputeType enum for + the list of possible values. + + This field is a member of `oneof`_ ``_confidential_compute_type``. creation_timestamp (str): Output only. [Output Only] The creation timestamp for this future reservation inRFC3339 text format. 
@@ -42089,6 +42323,12 @@ class FutureReservation(proto.Message): reservations name format will be -date-####. This field is a member of `oneof`_ ``_name_prefix``. + params (google.cloud.compute_v1beta.types.FutureReservationParams): + Input only. Additional params passed with the + request, but not persisted as part of resource + payload. + + This field is a member of `oneof`_ ``_params``. planning_status (str): Planning state before being submitted for evaluation Check the PlanningStatus enum for the @@ -42165,6 +42405,23 @@ class FutureReservation(proto.Message): This field is a member of `oneof`_ ``_zone``. """ + class ConfidentialComputeType(proto.Enum): + r""" + + Values: + UNDEFINED_CONFIDENTIAL_COMPUTE_TYPE (0): + A value indicating that the enum field is not + set. + CONFIDENTIAL_COMPUTE_TYPE_TDX (301241954): + Intel Trust Domain Extensions. + CONFIDENTIAL_COMPUTE_TYPE_UNSPECIFIED (42227601): + No description available. + """ + + UNDEFINED_CONFIDENTIAL_COMPUTE_TYPE = 0 + CONFIDENTIAL_COMPUTE_TYPE_TDX = 301241954 + CONFIDENTIAL_COMPUTE_TYPE_UNSPECIFIED = 42227601 + class DeploymentType(proto.Enum): r"""Type of the deployment requested as part of future reservation. @@ -42323,6 +42580,11 @@ class SchedulingType(proto.Enum): optional=True, message="FutureReservationCommitmentInfo", ) + confidential_compute_type: str = proto.Field( + proto.STRING, + number=386447257, + optional=True, + ) creation_timestamp: str = proto.Field( proto.STRING, number=30525366, @@ -42363,6 +42625,12 @@ class SchedulingType(proto.Enum): number=236409542, optional=True, ) + params: "FutureReservationParams" = proto.Field( + proto.MESSAGE, + number=78313862, + optional=True, + message="FutureReservationParams", + ) planning_status: str = proto.Field( proto.STRING, number=19714836, @@ -42526,6 +42794,28 @@ class PreviousCommitmentTerms(proto.Enum): ) +class FutureReservationParams(proto.Message): + r"""Additional future reservation params. 
+ + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Input only. Resource manager tags to be bound to the future + reservation. Tag keys and values have the same definition as + resource manager tags. Keys and values can be either in + numeric format, such as ``tagKeys/{tag_key_id}`` and + ``tagValues/{tag_value_id}`` or in namespaced format such as + ``{org_id|project_id}/{tag_key_short_name}`` and + ``{tag_value_short_name}``. The field is ignored (both PUT & + PATCH) when empty. + """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=377671164, + ) + + class FutureReservationSpecificSKUProperties(proto.Message): r""" @@ -45026,6 +45316,62 @@ class GetHealthRegionBackendServiceRequest(proto.Message): ) +class GetHealthRegionCompositeHealthCheckRequest(proto.Message): + r"""A request message for RegionCompositeHealthChecks.GetHealth. + See the method description for details. + + Attributes: + composite_health_check (str): + Name of the CompositeHealthCheck resource to + get health for. + project (str): + Name of the project scoping this request. + region (str): + Name of the region scoping this request. + """ + + composite_health_check: str = proto.Field( + proto.STRING, + number=466984989, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + +class GetHealthRegionHealthSourceRequest(proto.Message): + r"""A request message for RegionHealthSources.GetHealth. See the + method description for details. + + Attributes: + health_source (str): + Name of the HealthSource resource to get + health for. + project (str): + Name of the project scoping this request. + region (str): + Name of the region scoping this request. 
+ """ + + health_source: str = proto.Field( + proto.STRING, + number=376521566, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + class GetHealthTargetPoolRequest(proto.Message): r"""A request message for TargetPools.GetHealth. See the method description for details. @@ -51626,6 +51972,7 @@ class GuestOsFeature(proto.Message): - TDX_CAPABLE - IDPF - SNP_SVSM_CAPABLE + - CCA_CAPABLE For more information, see Enabling guest operating system features. Check the Type enum for the list of possible @@ -51652,6 +51999,7 @@ class Type(proto.Enum): - TDX_CAPABLE - IDPF - SNP_SVSM_CAPABLE + - CCA_CAPABLE For more information, see Enabling guest operating system features. @@ -53929,6 +54277,67 @@ def raw_page(self): ) +class HealthSourceHealth(proto.Message): + r"""Response message for RegionHealthSources.GetHealth + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_state (str): + Health state of the HealthSource. + Check the HealthState enum for the list of + possible values. + + This field is a member of `oneof`_ ``_health_state``. + kind (str): + Output only. [Output Only] Type of resource. + Alwayscompute#healthSourceHealth for the health of health + sources. + + This field is a member of `oneof`_ ``_kind``. + sources (MutableSequence[google.cloud.compute_v1beta.types.HealthSourcesGetHealthResponseSourceInfo]): + Health state details of the sources. + """ + + class HealthState(proto.Enum): + r"""Health state of the HealthSource. + + Values: + UNDEFINED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + HEALTHY (439801213): + No description available. + UNHEALTHY (462118084): + No description available. + UNKNOWN (433141802): + No description available. 
+ """ + + UNDEFINED_HEALTH_STATE = 0 + HEALTHY = 439801213 + UNHEALTHY = 462118084 + UNKNOWN = 433141802 + + health_state: str = proto.Field( + proto.STRING, + number=324007150, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + sources: MutableSequence["HealthSourcesGetHealthResponseSourceInfo"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=125606840, + message="HealthSourcesGetHealthResponseSourceInfo", + ) + ) + + class HealthSourceList(proto.Message): r""" @@ -54003,6 +54412,89 @@ def raw_page(self): ) +class HealthSourcesGetHealthResponseSourceInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backends (MutableSequence[google.cloud.compute_v1beta.types.HealthSourcesGetHealthResponseSourceInfoBackendInfo]): + Represents an instance group or network endpoint group + behind the source backend service. Only used if the + sourceType of the regionHealthSource is BACKEND_SERVICE. + forwarding_rule (str): + Fully qualified URL of the forwarding rule + associated with the source resource if it is a + L4ILB backend service. + + This field is a member of `oneof`_ ``_forwarding_rule``. + source (str): + Fully qualified URL of the associated source + resource. This is always a backend service URL. + + This field is a member of `oneof`_ ``_source``. + """ + + backends: MutableSequence["HealthSourcesGetHealthResponseSourceInfoBackendInfo"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=510839903, + message="HealthSourcesGetHealthResponseSourceInfoBackendInfo", + ) + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + optional=True, + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + + +class HealthSourcesGetHealthResponseSourceInfoBackendInfo(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + endpoint_count (int): + Total number of endpoints when determining + the health of the regionHealthSource. + + This field is a member of `oneof`_ ``_endpoint_count``. + group (str): + Fully qualified URL of an instance group or + network endpoint group behind the source backend + service. + + This field is a member of `oneof`_ ``_group``. + healthy_endpoint_count (int): + Number of endpoints considered healthy when + determining health of the regionHealthSource. + + This field is a member of `oneof`_ ``_healthy_endpoint_count``. + """ + + endpoint_count: int = proto.Field( + proto.INT32, + number=486641125, + optional=True, + ) + group: str = proto.Field( + proto.STRING, + number=98629247, + optional=True, + ) + healthy_endpoint_count: int = proto.Field( + proto.INT32, + number=291284167, + optional=True, + ) + + class HealthSourcesScopedList(proto.Message): r""" @@ -55196,8 +55688,11 @@ class HttpRouteAction(proto.Message): Attributes: cache_policy (google.cloud.compute_v1beta.types.CachePolicy): - Cache policy for this URL Map’s route. Available only for - Global EXTERNAL_MANAGED load balancer schemes. + Specifies the cache policy configuration for matched + traffic. Available only for Global ``EXTERNAL_MANAGED`` load + balancer schemes. At least one property must be specified. + This policy cannot be specified if any target backend has + Identity-Aware Proxy enabled. This field is a member of `oneof`_ ``_cache_policy``. cors_policy (google.cloud.compute_v1beta.types.CorsPolicy): @@ -63415,14 +63910,15 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_base_instance_name``. creation_timestamp (str): - Output only. [Output Only] The creation timestamp for this + Output only. The creation timestamp for this managed instance group inRFC3339 text format. This field is a member of `oneof`_ ``_creation_timestamp``. 
current_actions (google.cloud.compute_v1beta.types.InstanceGroupManagerActionsSummary): - Output only. [Output Only] The list of instance actions and - the number of instances in this managed instance group that - are scheduled for each of those actions. + Output only. The list of instance actions and + the number of instances in this managed instance + group that are scheduled for each of those + actions. This field is a member of `oneof`_ ``_current_actions``. description (str): @@ -63456,8 +63952,9 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_fingerprint``. id (int): - Output only. [Output Only] A unique identifier for this - resource type. The server generates this identifier. + Output only. A unique identifier for this + resource type. The server generates this + identifier. This field is a member of `oneof`_ ``_id``. instance_flexibility_policy (google.cloud.compute_v1beta.types.InstanceGroupManagerInstanceFlexibilityPolicy): @@ -63468,7 +63965,7 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_instance_flexibility_policy``. instance_group (str): - Output only. [Output Only] The URL of the Instance Group + Output only. The URL of the Instance Group resource. This field is a member of `oneof`_ ``_instance_group``. @@ -63489,9 +63986,9 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_instance_template``. kind (str): - Output only. [Output Only] The resource type, which is - alwayscompute#instanceGroupManager for managed instance - groups. + Output only. The resource type, which is + alwayscompute#instanceGroupManager for managed + instance groups. This field is a member of `oneof`_ ``_kind``. list_managed_instances_results (str): @@ -63533,16 +64030,16 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_resource_policies``. satisfies_pzi (bool): - Output only. [Output Only] Reserved for future use. + Output only. 
Reserved for future use. This field is a member of `oneof`_ ``_satisfies_pzi``. satisfies_pzs (bool): - Output only. [Output Only] Reserved for future use. + Output only. Reserved for future use. This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): - Output only. [Output Only] The URL for this managed instance - group. The server defines this URL. + Output only. The URL for this managed + instance group. The server defines this URL. This field is a member of `oneof`_ ``_self_link``. service_account (str): @@ -63567,7 +64064,7 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_stateful_policy``. status (google.cloud.compute_v1beta.types.InstanceGroupManagerStatus): - Output only. [Output Only] The status of this managed + Output only. The status of this managed instance group. This field is a member of `oneof`_ ``_status``. @@ -63633,8 +64130,9 @@ class InstanceGroupManager(proto.Message): will be applied to all remaining instances. For more information, read aboutcanary updates. zone (str): - Output only. [Output Only] The URL of azone where the - managed instance group is located (for zonal resources). + Output only. The URL of azone + where the managed instance group is located (for + zonal resources). This field is a member of `oneof`_ ``_zone``. """ @@ -63892,100 +64390,105 @@ class InstanceGroupManagerActionsSummary(proto.Message): Attributes: abandoning (int): - Output only. [Output Only] The total number of instances in - the managed instance group that are scheduled to be - abandoned. Abandoning an instance removes it from the - managed instance group without deleting it. + Output only. The total number of instances in + the managed instance group that are scheduled to + be abandoned. Abandoning an instance removes it + from the managed instance group without deleting + it. This field is a member of `oneof`_ ``_abandoning``. 
adopting (int): - [Output Only] The number of instances in the managed - instance group that are scheduled to be adopted or are - currently being adopted. + Output only. The number of instances in the + managed instance group that are scheduled to be + adopted or are currently being adopted. This field is a member of `oneof`_ ``_adopting``. creating (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be created or - are currently being created. If the group fails to create - any of these instances, it tries again until it creates the - instance successfully. + Output only. The number of instances in the + managed instance group that are scheduled to be + created or are currently being created. If the + group fails to create any of these instances, it + tries again until it creates the instance + successfully. - If you have disabled creation retries, this field will not - be populated; instead, the creatingWithoutRetries field will - be populated. + If you have disabled creation retries, this + field will not be populated; instead, the + creatingWithoutRetries field will be populated. This field is a member of `oneof`_ ``_creating``. creating_without_retries (int): - Output only. [Output Only] The number of instances that the - managed instance group will attempt to create. The group - attempts to create each instance only once. If the group - fails to create any of these instances, it decreases the - group's targetSize value accordingly. + Output only. The number of instances that the + managed instance group will attempt to create. + The group attempts to create each instance only + once. If the group fails to create any of these + instances, it decreases the group's targetSize + value accordingly. This field is a member of `oneof`_ ``_creating_without_retries``. deleting (int): - Output only. 
[Output Only] The number of instances in the - managed instance group that are scheduled to be deleted or - are currently being deleted. + Output only. The number of instances in the + managed instance group that are scheduled to be + deleted or are currently being deleted. This field is a member of `oneof`_ ``_deleting``. none (int): - Output only. [Output Only] The number of instances in the - managed instance group that are running and have no - scheduled actions. + Output only. The number of instances in the + managed instance group that are running and have + no scheduled actions. This field is a member of `oneof`_ ``_none``. recreating (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be recreated or - are currently being being recreated. Recreating an instance - deletes the existing root persistent disk and creates a new - disk from the image that is defined in the instance - template. + Output only. The number of instances in the + managed instance group that are scheduled to be + recreated or are currently being being + recreated. Recreating an instance deletes the + existing root persistent disk and creates a new + disk from the image that is defined in the + instance template. This field is a member of `oneof`_ ``_recreating``. refreshing (int): - Output only. [Output Only] The number of instances in the - managed instance group that are being reconfigured with - properties that do not require a restart or a recreate - action. For example, setting or removing target pools for - the instance. + Output only. The number of instances in the + managed instance group that are being + reconfigured with properties that do not require + a restart or a recreate action. For example, + setting or removing target pools for the + instance. This field is a member of `oneof`_ ``_refreshing``. restarting (int): - Output only. 
[Output Only] The number of instances in the - managed instance group that are scheduled to be restarted or - are currently being restarted. + Output only. The number of instances in the + managed instance group that are scheduled to be + restarted or are currently being restarted. This field is a member of `oneof`_ ``_restarting``. resuming (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be resumed or - are currently being resumed. + Output only. The number of instances in the + managed instance group that are scheduled to be + resumed or are currently being resumed. This field is a member of `oneof`_ ``_resuming``. starting (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be started or - are currently being started. + Output only. The number of instances in the + managed instance group that are scheduled to be + started or are currently being started. This field is a member of `oneof`_ ``_starting``. stopping (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be stopped or - are currently being stopped. + Output only. The number of instances in the + managed instance group that are scheduled to be + stopped or are currently being stopped. This field is a member of `oneof`_ ``_stopping``. suspending (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be suspended or - are currently being suspended. + Output only. The number of instances in the + managed instance group that are scheduled to be + suspended or are currently being suspended. This field is a member of `oneof`_ ``_suspending``. verifying (int): - Output only. [Output Only] The number of instances in the - managed instance group that are being verified. See the + Output only. 
The number of instances in the managed instance + group that are being verified. See the managedInstances[].currentAction property in the listManagedInstances method documentation. @@ -64680,7 +65183,7 @@ class InstanceGroupManagerResizeRequest(proto.Message): This field is a member of `oneof`_ ``_count``. creation_timestamp (str): - Output only. [Output Only] The creation timestamp for this + Output only. The creation timestamp for this resize request inRFC3339 text format. This field is a member of `oneof`_ ``_creation_timestamp``. @@ -64689,8 +65192,9 @@ class InstanceGroupManagerResizeRequest(proto.Message): This field is a member of `oneof`_ ``_description``. id (int): - Output only. [Output Only] A unique identifier for this - resource type. The server generates this identifier. + Output only. A unique identifier for this + resource type. The server generates this + identifier. This field is a member of `oneof`_ ``_id``. instances (MutableSequence[google.cloud.compute_v1beta.types.PerInstanceConfig]): @@ -64700,9 +65204,9 @@ class InstanceGroupManagerResizeRequest(proto.Message): increased by this number. This field cannot be used together with 'resize_by'. kind (str): - Output only. [Output Only] The resource type, which is - alwayscompute#instanceGroupManagerResizeRequest for resize - requests. + Output only. The resource type, which is + alwayscompute#instanceGroupManagerResizeRequest + for resize requests. This field is a member of `oneof`_ ``_kind``. name (str): @@ -64712,9 +65216,9 @@ class InstanceGroupManagerResizeRequest(proto.Message): This field is a member of `oneof`_ ``_name``. region (str): - Output only. [Output Only] The URL of aregion where the - resize request is located. Populated only for regional - resize requests. + Output only. The URL of a region + where the resize request is located. Populated + only for regional resize requests. This field is a member of `oneof`_ ``_region``. 
requested_run_duration (google.cloud.compute_v1beta.types.Duration): @@ -64731,34 +65235,35 @@ class InstanceGroupManagerResizeRequest(proto.Message): This field is a member of `oneof`_ ``_resize_by``. self_link (str): - Output only. [Output Only] The URL for this resize request. + Output only. The URL for this resize request. The server defines this URL. This field is a member of `oneof`_ ``_self_link``. self_link_with_id (str): - Output only. [Output Only] Server-defined URL for this + Output only. Server-defined URL for this resource with the resource id. This field is a member of `oneof`_ ``_self_link_with_id``. state (str): - Output only. [Output only] Current state of the request. - Check the State enum for the list of possible values. + Output only. Current state of the request. + Check the State enum for the list of possible + values. This field is a member of `oneof`_ ``_state``. status (google.cloud.compute_v1beta.types.InstanceGroupManagerResizeRequestStatus): - Output only. [Output only] Status of the request. + Output only. Status of the request. This field is a member of `oneof`_ ``_status``. zone (str): - Output only. [Output Only] The URL of azone where the resize - request is located. Populated only for zonal resize - requests. + Output only. The URL of a zone + where the resize request is located. Populated + only for zonal resize requests. This field is a member of `oneof`_ ``_zone``. """ class State(proto.Enum): - r"""Output only. [Output only] Current state of the request. + r"""Output only. Current state of the request. Values: UNDEFINED_STATE (0): @@ -64886,25 +65391,26 @@ class InstanceGroupManagerResizeRequestStatus(proto.Message): Attributes: error (google.cloud.compute_v1beta.types.Error): - Output only. [Output only] Fatal errors encountered during - the queueing or provisioning phases of the ResizeRequest - that caused the transition to the FAILED state. 
Contrary to - the last_attempt errors, this field is final and errors are - never removed from here, as the ResizeRequest is not going - to retry. + Output only. Fatal errors encountered during the queueing or + provisioning phases of the ResizeRequest that caused the + transition to the FAILED state. Contrary to the last_attempt + errors, this field is final and errors are never removed + from here, as the ResizeRequest is not going to retry. This field is a member of `oneof`_ ``_error``. last_attempt (google.cloud.compute_v1beta.types.InstanceGroupManagerResizeRequestStatusLastAttempt): - Output only. [Output only] Information about the last - attempt to fulfill the request. The value is temporary since - the ResizeRequest can retry, as long as it's still active - and the last attempt value can either be cleared or replaced - with a different error. Since ResizeRequest retries - infrequently, the value may be stale and no longer show an - active problem. The value is cleared when ResizeRequest - transitions to the final state (becomes inactive). If the - final state is FAILED the error describing it will be - storred in the "error" field only. + Output only. Information about the last + attempt to fulfill the request. The value is + temporary since the ResizeRequest can retry, as + long as it's still active and the last attempt + value can either be cleared or replaced with a + different error. Since ResizeRequest retries + infrequently, the value may be stale and no + longer show an active problem. The value is + cleared when ResizeRequest transitions to the + final state (becomes inactive). If the final + state is FAILED the error describing it will be + stored in the "error" field only. This field is a member of `oneof`_ ``_last_attempt``. """ @@ -65116,49 +65622,53 @@ class InstanceGroupManagerStatus(proto.Message): Attributes: all_instances_config (google.cloud.compute_v1beta.types.InstanceGroupManagerStatusAllInstancesConfig): - Output only. 
[Output only] Status of all-instances + Output only. Status of all-instances configuration on the group. This field is a member of `oneof`_ ``_all_instances_config``. applied_accelerator_topologies (MutableSequence[google.cloud.compute_v1beta.types.InstanceGroupManagerStatusAcceleratorTopology]): - Output only. [Output Only] The accelerator topology applied - to this MIG. Currently only one accelerator topology is - supported. + Output only. The accelerator topology applied + to this MIG. Currently only one accelerator + topology is supported. autoscaler (str): - Output only. [Output Only] The URL of theAutoscaler that - targets this instance group manager. + Output only. The URL of theAutoscaler + that targets this instance group manager. This field is a member of `oneof`_ ``_autoscaler``. bulk_instance_operation (google.cloud.compute_v1beta.types.InstanceGroupManagerStatusBulkInstanceOperation): - Output only. [Output Only] The status of bulk instance + Output only. The status of bulk instance operation. This field is a member of `oneof`_ ``_bulk_instance_operation``. current_instance_statuses (google.cloud.compute_v1beta.types.InstanceGroupManagerStatusInstanceStatusSummary): - Output only. [Output Only] The list of instance statuses and - the number of instances in this managed instance group that - have the status. Currently only shown for TPU MIGs + Output only. The list of instance statuses + and the number of instances in this managed + instance group that have the status. Currently + only shown for TPU MIGs This field is a member of `oneof`_ ``_current_instance_statuses``. is_stable (bool): - Output only. [Output Only] A bit indicating whether the - managed instance group is in a stable state. 
A stable state - means that: none of the instances in the managed instance - group is currently undergoing any type of change (for - example, creation, restart, or deletion); no future changes - are scheduled for instances in the managed instance group; - and the managed instance group itself is not being modified. + Output only. A bit indicating whether the + managed instance group is in a stable state. A + stable state means that: none of the instances + in the managed instance group is currently + undergoing any type of change (for example, + creation, restart, or deletion); no future + changes are scheduled for instances in the + managed instance group; and the managed instance + group itself is not being modified. This field is a member of `oneof`_ ``_is_stable``. stateful (google.cloud.compute_v1beta.types.InstanceGroupManagerStatusStateful): - Output only. [Output Only] Stateful status of the given + Output only. Stateful status of the given Instance Group Manager. This field is a member of `oneof`_ ``_stateful``. version_target (google.cloud.compute_v1beta.types.InstanceGroupManagerStatusVersionTarget): - Output only. [Output Only] A status of consistency of - Instances' versions with their target version specified by - version field on Instance Group Manager. + Output only. A status of consistency of + Instances' versions with their target version + specified by version field on Instance Group + Manager. This field is a member of `oneof`_ ``_version_target``. """ @@ -65223,26 +65733,26 @@ class InstanceGroupManagerStatusAcceleratorTopology(proto.Message): Attributes: accelerator_topology (str): - Output only. [Output Only] Topology in the format of: - "16x16", "4x4x4", etc. The value is the same as configured - in the WorkloadPolicy. + Output only. Topology in the format of: + "16x16", "4x4x4", etc. The value is the same as + configured in the WorkloadPolicy. This field is a member of `oneof`_ ``_accelerator_topology``. state (str): - Output only. 
[Output Only] The state of the accelerator - topology. Check the State enum for the list of possible - values. + Output only. The state of the accelerator + topology. Check the State enum for the list of + possible values. This field is a member of `oneof`_ ``_state``. state_details (google.cloud.compute_v1beta.types.InstanceGroupManagerStatusAcceleratorTopologyAcceleratorTopologyStateDetails): - Output only. [Output Only] The result of the latest + Output only. The result of the latest accelerator topology state check. This field is a member of `oneof`_ ``_state_details``. """ class State(proto.Enum): - r"""Output only. [Output Only] The state of the accelerator topology. + r"""Output only. The state of the accelerator topology. Values: UNDEFINED_STATE (0): @@ -65301,12 +65811,13 @@ class InstanceGroupManagerStatusAcceleratorTopologyAcceleratorTopologyStateDetai Attributes: error (google.cloud.compute_v1beta.types.Error): - Output only. [Output Only] Encountered errors. + Output only. Encountered errors. This field is a member of `oneof`_ ``_error``. timestamp (str): - Output only. [Output Only] Timestamp is shown only if there - is an error. The field has // RFC3339 // text format. + Output only. Timestamp is shown only if there + is an error. The field has // RFC3339 // + text format. This field is a member of `oneof`_ ``_timestamp``. """ @@ -65331,15 +65842,15 @@ class InstanceGroupManagerStatusAllInstancesConfig(proto.Message): Attributes: current_revision (str): - Output only. [Output Only] Current all-instances - configuration revision. This value is in RFC3339 text - format. + Output only. Current all-instances + configuration revision. This value is in RFC3339 + text format. This field is a member of `oneof`_ ``_current_revision``. effective (bool): - Output only. [Output Only] A bit indicating whether this - configuration has been applied to all managed instances in - the group. + Output only. 
A bit indicating whether this + configuration has been applied to all managed + instances in the group. This field is a member of `oneof`_ ``_effective``. """ @@ -65365,12 +65876,12 @@ class InstanceGroupManagerStatusBulkInstanceOperation(proto.Message): Attributes: in_progress (bool): - Output only. [Output Only] Informs whether bulk instance + Output only. Informs whether bulk instance operation is in progress. This field is a member of `oneof`_ ``_in_progress``. last_progress_check (google.cloud.compute_v1beta.types.InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck): - Output only. [Output Only] Information from the last + Output only. Information from the last progress check of bulk instance operation. This field is a member of `oneof`_ ``_last_progress_check``. @@ -65396,14 +65907,14 @@ class InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck(proto.Mes Attributes: error (google.cloud.compute_v1beta.types.Error): - Output only. [Output Only] Errors encountered during bulk + Output only. Errors encountered during bulk instance operation. This field is a member of `oneof`_ ``_error``. timestamp (str): - Output only. [Output Only] Timestamp of the last progress - check of bulk instance operation. Timestamp is in RFC3339 - text format. + Output only. Timestamp of the last progress + check of bulk instance operation. Timestamp is + in RFC3339 text format. This field is a member of `oneof`_ ``_timestamp``. """ @@ -65433,76 +65944,84 @@ class InstanceGroupManagerStatusInstanceStatusSummary(proto.Message): Attributes: deprovisioning (int): - Output only. [Output Only] The number of instances in the - managed instance group that have DEPROVISIONING status. + Output only. The number of instances in the + managed instance group that have DEPROVISIONING + status. This field is a member of `oneof`_ ``_deprovisioning``. non_existent (int): - Output only. [Output Only] The number of instances that have - not been created yet or have been deleted. 
Includes only - instances that would be shown in the listManagedInstances - method and not all instances that have been deleted in the - lifetime of the MIG. Does not include FlexStart instances - that are waiting for the resources availability, they are + Output only. The number of instances that + have not been created yet or have been deleted. + Includes only instances that would be shown in + the listManagedInstances method and not all + instances that have been deleted in the lifetime + of the MIG. + Does not include FlexStart instances that are + waiting for the resources availability, they are considered as 'pending'. This field is a member of `oneof`_ ``_non_existent``. pending (int): - Output only. [Output Only] The number of instances in the - managed instance group that have PENDING status, that is - FlexStart instances that are waiting for resources. - Instances that do not exist because of the other reasons are - counted as 'non_existent'. + Output only. The number of instances in the managed instance + group that have PENDING status, that is FlexStart instances + that are waiting for resources. Instances that do not exist + because of the other reasons are counted as 'non_existent'. This field is a member of `oneof`_ ``_pending``. pending_stop (int): - Output only. [Output Only] The number of instances in the - managed instance group that have PENDING_STOP status. + Output only. The number of instances in the managed instance + group that have PENDING_STOP status. This field is a member of `oneof`_ ``_pending_stop``. provisioning (int): - Output only. [Output Only] The number of instances in the - managed instance group that have PROVISIONING status. + Output only. The number of instances in the + managed instance group that have PROVISIONING + status. This field is a member of `oneof`_ ``_provisioning``. repairing (int): - Output only. [Output Only] The number of instances in the - managed instance group that have REPAIRING status. + Output only. 
The number of instances in the + managed instance group that have REPAIRING + status. This field is a member of `oneof`_ ``_repairing``. running (int): - Output only. [Output Only] The number of instances in the + Output only. The number of instances in the managed instance group that have RUNNING status. This field is a member of `oneof`_ ``_running``. staging (int): - Output only. [Output Only] The number of instances in the + Output only. The number of instances in the managed instance group that have STAGING status. This field is a member of `oneof`_ ``_staging``. stopped (int): - Output only. [Output Only] The number of instances in the + Output only. The number of instances in the managed instance group that have STOPPED status. This field is a member of `oneof`_ ``_stopped``. stopping (int): - Output only. [Output Only] The number of instances in the - managed instance group that have STOPPING status. + Output only. The number of instances in the + managed instance group that have STOPPING + status. This field is a member of `oneof`_ ``_stopping``. suspended (int): - Output only. [Output Only] The number of instances in the - managed instance group that have SUSPENDED status. + Output only. The number of instances in the + managed instance group that have SUSPENDED + status. This field is a member of `oneof`_ ``_suspended``. suspending (int): - Output only. [Output Only] The number of instances in the - managed instance group that have SUSPENDING status. + Output only. The number of instances in the + managed instance group that have SUSPENDING + status. This field is a member of `oneof`_ ``_suspending``. terminated (int): - Output only. [Output Only] The number of instances in the - managed instance group that have TERMINATED status. + Output only. The number of instances in the + managed instance group that have TERMINATED + status. This field is a member of `oneof`_ ``_terminated``. 
""" @@ -65581,28 +66100,30 @@ class InstanceGroupManagerStatusStateful(proto.Message): Attributes: has_stateful_config (bool): - Output only. [Output Only] A bit indicating whether the - managed instance group has stateful configuration, that is, - if you have configured any items in a stateful policy or in - per-instance configs. The group might report that it has no - stateful configuration even when there is still some - preserved state on a managed instance, for example, if you - have deleted all PICs but not yet applied those deletions. + Output only. A bit indicating whether the + managed instance group has stateful + configuration, that is, if you have configured + any items in a stateful policy or in + per-instance configs. The group might report + that it has no stateful configuration even when + there is still some preserved state on a managed + instance, for example, if you have deleted all + PICs but not yet applied those deletions. This field is a member of `oneof`_ ``_has_stateful_config``. is_stateful (bool): - Output only. [Output Only] A bit indicating whether the - managed instance group has stateful configuration, that is, - if you have configured any items in a stateful policy or in - per-instance configs. The group might report that it has no - stateful configuration even when there is still some - preserved state on a managed instance, for example, if you - have deleted all PICs but not yet applied those deletions. - This field is deprecated in favor of has_stateful_config. + Output only. A bit indicating whether the managed instance + group has stateful configuration, that is, if you have + configured any items in a stateful policy or in per-instance + configs. The group might report that it has no stateful + configuration even when there is still some preserved state + on a managed instance, for example, if you have deleted all + PICs but not yet applied those deletions. This field is + deprecated in favor of has_stateful_config. 
This field is a member of `oneof`_ ``_is_stateful``. per_instance_configs (google.cloud.compute_v1beta.types.InstanceGroupManagerStatusStatefulPerInstanceConfigs): - Output only. [Output Only] Status of per-instance + Output only. Status of per-instance configurations on the instances. This field is a member of `oneof`_ ``_per_instance_configs``. @@ -65658,11 +66179,11 @@ class InstanceGroupManagerStatusVersionTarget(proto.Message): Attributes: is_reached (bool): - Output only. [Output Only] A bit indicating whether version - target has been reached in this managed instance group, i.e. - all instances are in their target version. Instances' target - version are specified byversion field on Instance Group - Manager. + Output only. A bit indicating whether version + target has been reached in this managed instance + group, i.e. all instances are in their target + version. Instances' target version are specified + byversion field on Instance Group Manager. This field is a member of `oneof`_ ``_is_reached``. """ @@ -66264,6 +66785,12 @@ class InstanceGroupManagersConfigureAcceleratorTopologiesRequest(proto.Message): The key is the hashed topology locus id. It can be obtained from the GetAvailableAcceleratorTopologies rpc. + accelerator_topology_configurations (MutableMapping[str, google.cloud.compute_v1beta.types.InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration]): + Map of accelerator topologies that should + have their state changed to the specified + configuration. The map key is the hashed + topology locus id. It can be obtained from the + GetAvailableAcceleratorTopologies rpc. 
""" accelerator_topology_actions: MutableMapping[str, str] = proto.MapField( @@ -66271,6 +66798,75 @@ class InstanceGroupManagersConfigureAcceleratorTopologiesRequest(proto.Message): proto.STRING, number=16520833, ) + accelerator_topology_configurations: MutableMapping[ + str, + "InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration", + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=95019065, + message="InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration", + ) + + +class InstanceGroupManagersConfigureAcceleratorTopologiesRequestAcceleratorTopologyConfiguration( + proto.Message +): + r"""Configuration for a single accelerator topology. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + action (str): + Check the Action enum for the list of + possible values. + + This field is a member of `oneof`_ ``_action``. + external_id (str): + Identifier of the accelerator topology + assigned externally to differentiate who is the + owner of the topology. The format needs to + conform to RFC1035 and be unique. The uniqueness + is guaranteed by the requestor. If it is + provided on activating the sub-slice it will + have to be provided on deactivating as well. + This identifier is cleared on successful deform + of a sub-slice. + + This field is a member of `oneof`_ ``_external_id``. + """ + + class Action(proto.Enum): + r""" + + Values: + UNDEFINED_ACTION (0): + A value indicating that the enum field is not + set. + ACCELERATOR_TOPOLOGY_ACTION_UNSPECIFIED (78494314): + Default value. Should not be used. + ACTIVATE (200393971): + The accelerator topology is to be activated. + DEACTIVATE (98942068): + The accelerator topology is to be + deactivated. 
+ """ + + UNDEFINED_ACTION = 0 + ACCELERATOR_TOPOLOGY_ACTION_UNSPECIFIED = 78494314 + ACTIVATE = 200393971 + DEACTIVATE = 98942068 + + action: str = proto.Field( + proto.STRING, + number=187661878, + optional=True, + ) + external_id: str = proto.Field( + proto.STRING, + number=457537039, + optional=True, + ) class InstanceGroupManagersCreateInstancesRequest(proto.Message): @@ -66430,7 +67026,7 @@ class AcceleratorTopologyHealth(proto.Enum): infrastructure unsuitable for forming a working inter-chip connected group. UNKNOWN (433141802): - No signal available + No description available. """ UNDEFINED_ACCELERATOR_TOPOLOGY_HEALTH = 0 @@ -66506,6 +67102,17 @@ class InstanceGroupManagersGetAvailableAcceleratorTopologiesResponseAcceleratorT Timestamp when the last error happened This field is a member of `oneof`_ ``_error_timestamp``. + external_id (str): + Identifier of the accelerator topology + assigned externally to differentiate who is the + owner of the topology. This is set in + ConfigureAcceleratorTopologies. If it is + provided on activating the sub-slice it will + have to be provided on deactivating as well. + This identifier is cleared on successful deform + of a sub-slice. + + This field is a member of `oneof`_ ``_external_id``. """ class CurrentState(proto.Enum): @@ -66558,6 +67165,11 @@ class CurrentState(proto.Enum): number=219548575, optional=True, ) + external_id: str = proto.Field( + proto.STRING, + number=457537039, + optional=True, + ) class InstanceGroupManagersListErrorsResponse(proto.Message): @@ -105208,6 +105820,11 @@ class NetworkAttachmentConnectedEndpoint(proto.Message): This field is a member of `oneof`_ ``_project_id_or_num``. secondary_ip_cidr_ranges (MutableSequence[str]): Alias IP ranges from the same subnetwork. + service_class_id (str): + The service class id of the producer service + to which the IP was assigned. + + This field is a member of `oneof`_ ``_service_class_id``. 
status (str): The status of a connected endpoint to this network attachment. Check the Status enum for @@ -105280,6 +105897,11 @@ class Status(proto.Enum): proto.STRING, number=117184788, ) + service_class_id: str = proto.Field( + proto.STRING, + number=422763404, + optional=True, + ) status: str = proto.Field( proto.STRING, number=181260274, @@ -107143,6 +107765,14 @@ class NetworkInterface(proto.Message): empty if not specified by the users. This field is a member of `oneof`_ ``_queue_count``. + service_class_id (str): + Optional. Producer Service's Service class Id for the region + of this network interface. Can only be used with + network_attachment. It is not possible to use on its own + however, network_attachment can be used without + service_class_id. + + This field is a member of `oneof`_ ``_service_class_id``. stack_type (str): The stack type for this network interface. To assign only IPv4 addresses, use IPV4_ONLY. To assign both IPv4 and IPv6 @@ -107377,6 +108007,11 @@ class StackType(proto.Enum): number=503708769, optional=True, ) + service_class_id: str = proto.Field( + proto.STRING, + number=422763404, + optional=True, + ) stack_type: str = proto.Field( proto.STRING, number=425908881, @@ -123226,14 +123861,16 @@ class RawDisk(proto.Message): This field is a member of `oneof`_ ``_sha1_checksum``. source (str): - The full Google Cloud Storage URL where the raw disk image - archive is stored. The following are valid formats for the - URL: + The full Google Cloud Storage URL or Artifact Registry path + where the raw disk image archive is stored. 
The following + are valid formats: :: - https://storage.googleapis.com/bucket_name/image_archive_name - https://storage.googleapis.com/bucket_name/folder_name/image_archive_name + - projects/project/locations/location/repositories/repo/packages/package/versions/version_id + - projects/project/locations/location/repositories/repo/packages/package/versions/version_id@dirsum_sha256:hex_value In order to create an image, you must provide the full or partial URL of one of the following: @@ -123818,6 +124455,35 @@ def raw_page(self): ) +class RegionDiskUpdateKmsKeyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + Optional. The new KMS key to replace the current one on the + disk. If empty, the disk will be re-encrypted using the + primary version of the disk's current KMS key. + + The KMS key can be provided in the following formats: + + :: + + - projects/project_id/locations/location/keyRings/key_ring/cryptoKeys/key + + Where project is the project ID or project number. + + This field is a member of `oneof`_ ``_kms_key_name``. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + + class RegionDisksAddResourcePoliciesRequest(proto.Message): r""" @@ -125249,6 +125915,33 @@ class RegionSetPolicyRequest(proto.Message): ) +class RegionSnapshotUpdateKmsKeyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + Optional. The new KMS key to replace the current one on the + snapshot. If empty, the snapshot will be re-encrypted using + the primary version of the snapshot's current KMS key. 
+ + The KMS key can be provided in the following formats: + + :: + + - projects/project_id/locations/region/keyRings/region/cryptoKeys/key + + This field is a member of `oneof`_ ``_kms_key_name``. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + + class RegionTargetHttpsProxiesSetSslCertificatesRequest(proto.Message): r""" @@ -126809,6 +127502,11 @@ class Reservation(proto.Message): tied to a commitment. This field is a member of `oneof`_ ``_commitment``. + confidential_compute_type (str): + Check the ConfidentialComputeType enum for + the list of possible values. + + This field is a member of `oneof`_ ``_confidential_compute_type``. creation_timestamp (str): Output only. [Output Only] Creation timestamp inRFC3339 text format. @@ -126973,6 +127671,23 @@ class Reservation(proto.Message): This field is a member of `oneof`_ ``_zone``. """ + class ConfidentialComputeType(proto.Enum): + r""" + + Values: + UNDEFINED_CONFIDENTIAL_COMPUTE_TYPE (0): + A value indicating that the enum field is not + set. + CONFIDENTIAL_COMPUTE_TYPE_TDX (301241954): + Intel Trust Domain Extensions. + CONFIDENTIAL_COMPUTE_TYPE_UNSPECIFIED (42227601): + No description available. + """ + + UNDEFINED_CONFIDENTIAL_COMPUTE_TYPE = 0 + CONFIDENTIAL_COMPUTE_TYPE_TDX = 301241954 + CONFIDENTIAL_COMPUTE_TYPE_UNSPECIFIED = 42227601 + class DeploymentType(proto.Enum): r"""Specifies the deployment strategy for this reservation. @@ -127155,6 +127870,11 @@ class Status(proto.Enum): number=482134805, optional=True, ) + confidential_compute_type: str = proto.Field( + proto.STRING, + number=386447257, + optional=True, + ) creation_timestamp: str = proto.Field( proto.STRING, number=30525366, @@ -132607,6 +133327,10 @@ class RolloutWaveDetailsOrchestratedWaveDetails(proto.Message): Output only. Resources failed. This field is a member of `oneof`_ ``_failed_resources_count``. 
+ location_status (MutableMapping[str, google.cloud.compute_v1beta.types.RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus]): + Output only. Status of each location in the + wave. Map keys (locations) must be specified + like "us-east1" or "asia-west1-a". """ completed_resources_count: int = proto.Field( @@ -132628,6 +133352,65 @@ class RolloutWaveDetailsOrchestratedWaveDetails(proto.Message): number=391795315, optional=True, ) + location_status: MutableMapping[ + str, "RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=19007420, + message="RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus", + ) + + +class RolloutWaveDetailsOrchestratedWaveDetailsLocationStatus(proto.Message): + r"""Represents the status of a location in a wave. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + state (str): + Output only. Location state of the wave. + Check the State enum for the list of possible + values. + + This field is a member of `oneof`_ ``_state``. + """ + + class State(proto.Enum): + r"""Output only. Location state of the wave. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + STATE_FAILED (50857931): + Work on the wave failed. + STATE_IN_PROGRESS (413162809): + Work on the wave is in progress. + STATE_PENDING (369985449): + Work on the wave is pending. + STATE_SKIPPED (515663170): + Work on the wave was canceled or skipped. + STATE_SUCCEEDED (466911219): + Work on the wave succeeded. + STATE_UNSPECIFIED (470755401): + Undefined default state. Should never be + exposed to users. 
+ """ + + UNDEFINED_STATE = 0 + STATE_FAILED = 50857931 + STATE_IN_PROGRESS = 413162809 + STATE_PENDING = 369985449 + STATE_SKIPPED = 515663170 + STATE_SUCCEEDED = 466911219 + STATE_UNSPECIFIED = 470755401 + + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) class RolloutsListResponse(proto.Message): @@ -147447,12 +148230,14 @@ class Snapshot(proto.Message): This field is a member of `oneof`_ ``_snapshot_encryption_key``. snapshot_group_id (str): Output only. [Output Only] The unique ID of the snapshot - group that this snapshot belongs to. + group that this snapshot belongs to. The usage of snapshot + group feature is restricted. This field is a member of `oneof`_ ``_snapshot_group_id``. snapshot_group_name (str): Output only. [Output only] The snapshot group that this - snapshot belongs to. + snapshot belongs to. The usage of snapshot group feature is + restricted. This field is a member of `oneof`_ ``_snapshot_group_name``. snapshot_type (str): @@ -148495,6 +149280,33 @@ class SnapshotSettingsStorageLocationSettingsStorageLocationPreference(proto.Mes ) +class SnapshotUpdateKmsKeyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + Optional. The new KMS key to replace the current one on the + snapshot. If empty, the snapshot will be re-encrypted using + the primary version of the snapshot's current KMS key. + + The KMS key can be provided in the following formats: + + :: + + - projects/project_id/locations/region/keyRings/key_ring/cryptoKeys/key + + This field is a member of `oneof`_ ``_kms_key_name``. 
+ """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + + class SnapshotsScopedList(proto.Message): r""" @@ -161317,6 +162129,264 @@ class MostDisruptiveAllowedAction(proto.Enum): ) +class UpdateKmsKeyDiskRequest(proto.Message): + r"""A request message for Disks.UpdateKmsKey. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + Name of the Disk resource, should conform to + RFC1035. + disk_update_kms_key_request_resource (google.cloud.compute_v1beta.types.DiskUpdateKmsKeyRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. 
+ """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + disk_update_kms_key_request_resource: "DiskUpdateKmsKeyRequest" = proto.Field( + proto.MESSAGE, + number=178025952, + message="DiskUpdateKmsKeyRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateKmsKeyRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.UpdateKmsKey. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + Name of the Disk resource, should conform to + RFC1035. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_disk_update_kms_key_request_resource (google.cloud.compute_v1beta.types.RegionDiskUpdateKmsKeyRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. 
+ """ + + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_disk_update_kms_key_request_resource: "RegionDiskUpdateKmsKeyRequest" = ( + proto.Field( + proto.MESSAGE, + number=10015819, + message="RegionDiskUpdateKmsKeyRequest", + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateKmsKeyRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.UpdateKmsKey. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + region_snapshot_update_kms_key_request_resource (google.cloud.compute_v1beta.types.RegionSnapshotUpdateKmsKeyRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + snapshot (str): + Name of the snapshot resource to update. + Should conform to RFC1035. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_snapshot_update_kms_key_request_resource: "RegionSnapshotUpdateKmsKeyRequest" = proto.Field( + proto.MESSAGE, + number=279384818, + message="RegionSnapshotUpdateKmsKeyRequest", + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + snapshot: str = proto.Field( + proto.STRING, + number=284874180, + ) + + +class UpdateKmsKeySnapshotRequest(proto.Message): + r"""A request message for Snapshots.UpdateKmsKey. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + snapshot (str): + Name of the snapshot resource to update. + Should conform to RFC1035. 
+ snapshot_update_kms_key_request_resource (google.cloud.compute_v1beta.types.SnapshotUpdateKmsKeyRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + snapshot: str = proto.Field( + proto.STRING, + number=284874180, + ) + snapshot_update_kms_key_request_resource: "SnapshotUpdateKmsKeyRequest" = ( + proto.Field( + proto.MESSAGE, + number=103145991, + message="SnapshotUpdateKmsKeyRequest", + ) + ) + + class UpdateLicenseRequest(proto.Message): r"""A request message for Licenses.Update. See the method description for details. @@ -163113,6 +164183,10 @@ class UrlMap(proto.Message): [Output Only] Server-defined URL for the resource. This field is a member of `oneof`_ ``_self_link``. + status (google.cloud.compute_v1beta.types.UrlMapStatus): + Output only. [Output Only] The status of the URL map. + + This field is a member of `oneof`_ ``_status``. tests (MutableSequence[google.cloud.compute_v1beta.types.UrlMapTest]): The list of expected URL mapping tests. Request to update theUrlMap succeeds only if all @@ -163203,6 +164277,12 @@ class UrlMap(proto.Message): number=456214797, optional=True, ) + status: "UrlMapStatus" = proto.Field( + proto.MESSAGE, + number=181260274, + optional=True, + message="UrlMapStatus", + ) tests: MutableSequence["UrlMapTest"] = proto.RepeatedField( proto.MESSAGE, number=110251553, @@ -163284,6 +164364,36 @@ def raw_page(self): ) +class UrlMapQuotaUsage(proto.Message): + r"""Message representing the quota usage for a UrlMap. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + forwarding_rules (int): + Output only. The number of forwarding rules + that uses this UrlMap. + + This field is a member of `oneof`_ ``_forwarding_rules``. + units (int): + Output only. 
The number of quota units + calculated for this UrlMap. + + This field is a member of `oneof`_ ``_units``. + """ + + forwarding_rules: int = proto.Field( + proto.INT32, + number=315821365, + optional=True, + ) + units: int = proto.Field( + proto.INT64, + number=111433583, + optional=True, + ) + + class UrlMapReference(proto.Message): r""" @@ -163302,6 +164412,27 @@ class UrlMapReference(proto.Message): ) +class UrlMapStatus(proto.Message): + r"""[Output Only] The status of the URL map. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + quota_usage (google.cloud.compute_v1beta.types.UrlMapQuotaUsage): + Output only. Summary of quota usage for given + UrlMap. + + This field is a member of `oneof`_ ``_quota_usage``. + """ + + quota_usage: "UrlMapQuotaUsage" = proto.Field( + proto.MESSAGE, + number=238511386, + optional=True, + message="UrlMapQuotaUsage", + ) + + class UrlMapTest(proto.Message): r"""Message for the expected URL mappings. @@ -163448,6 +164579,11 @@ class UrlMapValidationResult(proto.Message): reasons. This field is a member of `oneof`_ ``_load_succeeded``. + quota_usage (google.cloud.compute_v1beta.types.UrlMapQuotaUsage): + Output only. Summary of quota usage for given + UrlMap. + + This field is a member of `oneof`_ ``_quota_usage``. 
test_failures (MutableSequence[google.cloud.compute_v1beta.types.TestFailure]): test_passed (bool): @@ -163467,6 +164603,12 @@ class UrlMapValidationResult(proto.Message): number=128326216, optional=True, ) + quota_usage: "UrlMapQuotaUsage" = proto.Field( + proto.MESSAGE, + number=238511386, + optional=True, + message="UrlMapQuotaUsage", + ) test_failures: MutableSequence["TestFailure"] = proto.RepeatedField( proto.MESSAGE, number=505934134, diff --git a/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_disks_update_kms_key_sync.py b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_disks_update_kms_key_sync.py new file mode 100644 index 000000000000..c5be95e11383 --- /dev/null +++ b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_disks_update_kms_key_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKmsKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute-v1beta + + +# [START compute_v1beta_generated_Disks_UpdateKmsKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1beta + + +def sample_update_kms_key(): + # Create a client + client = compute_v1beta.DisksClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + +# [END compute_v1beta_generated_Disks_UpdateKmsKey_sync] diff --git a/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_composite_health_checks_get_health_sync.py b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_composite_health_checks_get_health_sync.py new file mode 100644 index 000000000000..4a257715c524 --- /dev/null +++ b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_composite_health_checks_get_health_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHealth +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute-v1beta + + +# [START compute_v1beta_generated_RegionCompositeHealthChecks_GetHealth_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1beta + + +def sample_get_health(): + # Create a client + client = compute_v1beta.RegionCompositeHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1beta.GetHealthRegionCompositeHealthCheckRequest( + composite_health_check="composite_health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + +# [END compute_v1beta_generated_RegionCompositeHealthChecks_GetHealth_sync] diff --git a/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_disks_update_kms_key_sync.py b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_disks_update_kms_key_sync.py new file mode 100644 index 000000000000..414ced60a10b --- /dev/null +++ b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_disks_update_kms_key_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKmsKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute-v1beta + + +# [START compute_v1beta_generated_RegionDisks_UpdateKmsKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1beta + + +def sample_update_kms_key(): + # Create a client + client = compute_v1beta.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + +# [END compute_v1beta_generated_RegionDisks_UpdateKmsKey_sync] diff --git a/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_health_sources_get_health_sync.py b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_health_sources_get_health_sync.py new file mode 100644 index 000000000000..b7b45fd659bd --- /dev/null +++ b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_health_sources_get_health_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHealth +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute-v1beta + + +# [START compute_v1beta_generated_RegionHealthSources_GetHealth_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1beta + + +def sample_get_health(): + # Create a client + client = compute_v1beta.RegionHealthSourcesClient() + + # Initialize request argument(s) + request = compute_v1beta.GetHealthRegionHealthSourceRequest( + health_source="health_source_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + +# [END compute_v1beta_generated_RegionHealthSources_GetHealth_sync] diff --git a/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_snapshots_update_kms_key_sync.py b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_snapshots_update_kms_key_sync.py new file mode 100644 index 000000000000..bac7a8e88b9b --- /dev/null +++ b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_region_snapshots_update_kms_key_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKmsKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute-v1beta + + +# [START compute_v1beta_generated_RegionSnapshots_UpdateKmsKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1beta + + +def sample_update_kms_key(): + # Create a client + client = compute_v1beta.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeyRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + +# [END compute_v1beta_generated_RegionSnapshots_UpdateKmsKey_sync] diff --git a/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_snapshots_update_kms_key_sync.py b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_snapshots_update_kms_key_sync.py new file mode 100644 index 000000000000..05f29480a622 --- /dev/null +++ b/packages/google-cloud-compute-v1beta/samples/generated_samples/compute_v1beta_generated_snapshots_update_kms_key_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKmsKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute-v1beta + + +# [START compute_v1beta_generated_Snapshots_UpdateKmsKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1beta + + +def sample_update_kms_key(): + # Create a client + client = compute_v1beta.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1beta.UpdateKmsKeySnapshotRequest( + project="project_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + +# [END compute_v1beta_generated_Snapshots_UpdateKmsKey_sync] diff --git a/packages/google-cloud-compute-v1beta/samples/generated_samples/snippet_metadata_google.cloud.compute.v1beta.json b/packages/google-cloud-compute-v1beta/samples/generated_samples/snippet_metadata_google.cloud.compute.v1beta.json index 23c7f78d5491..ff64f182c8d7 100644 --- a/packages/google-cloud-compute-v1beta/samples/generated_samples/snippet_metadata_google.cloud.compute.v1beta.json +++ b/packages/google-cloud-compute-v1beta/samples/generated_samples/snippet_metadata_google.cloud.compute.v1beta.json @@ -6843,6 +6843,98 @@ ], "title": "compute_v1beta_generated_disks_test_iam_permissions_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1beta.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1beta.DisksClient.update_kms_key", + "method": { + "fullName": "google.cloud.compute.v1beta.Disks.UpdateKmsKey", + "service": { + "fullName": "google.cloud.compute.v1beta.Disks", + "shortName": "Disks" + }, + "shortName": "UpdateKmsKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1beta.types.UpdateKmsKeyDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": 
"disk_update_kms_key_request_resource", + "type": "google.cloud.compute_v1beta.types.DiskUpdateKmsKeyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_kms_key" + }, + "description": "Sample for UpdateKmsKey", + "file": "compute_v1beta_generated_disks_update_kms_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1beta_generated_Disks_UpdateKmsKey_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1beta_generated_disks_update_kms_key_sync.py" + }, { "canonical": true, "clientMethod": { @@ -48371,6 +48463,94 @@ ], "title": "compute_v1beta_generated_region_composite_health_checks_delete_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1beta.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1beta.RegionCompositeHealthChecksClient.get_health", + "method": { + "fullName": "google.cloud.compute.v1beta.RegionCompositeHealthChecks.GetHealth", + "service": { + "fullName": "google.cloud.compute.v1beta.RegionCompositeHealthChecks", + "shortName": "RegionCompositeHealthChecks" + }, + "shortName": "GetHealth" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1beta.types.GetHealthRegionCompositeHealthCheckRequest" + }, + { + "name": 
"project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "composite_health_check", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1beta.types.CompositeHealthCheckHealth", + "shortName": "get_health" + }, + "description": "Sample for GetHealth", + "file": "compute_v1beta_generated_region_composite_health_checks_get_health_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1beta_generated_RegionCompositeHealthChecks_GetHealth_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1beta_generated_region_composite_health_checks_get_health_sync.py" + }, { "canonical": true, "clientMethod": { @@ -50595,6 +50775,98 @@ ], "title": "compute_v1beta_generated_region_disks_test_iam_permissions_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1beta.RegionDisksClient", + "shortName": "RegionDisksClient" + }, + "fullName": "google.cloud.compute_v1beta.RegionDisksClient.update_kms_key", + "method": { + "fullName": "google.cloud.compute.v1beta.RegionDisks.UpdateKmsKey", + "service": { + "fullName": "google.cloud.compute.v1beta.RegionDisks", + "shortName": "RegionDisks" + }, + "shortName": "UpdateKmsKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1beta.types.UpdateKmsKeyRegionDiskRequest" + }, + { + "name": "project", + "type": "str" 
+ }, + { + "name": "region", + "type": "str" + }, + { + "name": "disk", + "type": "str" + }, + { + "name": "region_disk_update_kms_key_request_resource", + "type": "google.cloud.compute_v1beta.types.RegionDiskUpdateKmsKeyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_kms_key" + }, + "description": "Sample for UpdateKmsKey", + "file": "compute_v1beta_generated_region_disks_update_kms_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1beta_generated_RegionDisks_UpdateKmsKey_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1beta_generated_region_disks_update_kms_key_sync.py" + }, { "canonical": true, "clientMethod": { @@ -52703,6 +52975,94 @@ ], "title": "compute_v1beta_generated_region_health_sources_delete_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1beta.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" + }, + "fullName": "google.cloud.compute_v1beta.RegionHealthSourcesClient.get_health", + "method": { + "fullName": "google.cloud.compute.v1beta.RegionHealthSources.GetHealth", + "service": { + "fullName": "google.cloud.compute.v1beta.RegionHealthSources", + "shortName": "RegionHealthSources" + }, + "shortName": "GetHealth" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1beta.types.GetHealthRegionHealthSourceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "health_source", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1beta.types.HealthSourceHealth", + "shortName": "get_health" + }, + "description": "Sample for GetHealth", + "file": "compute_v1beta_generated_region_health_sources_get_health_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1beta_generated_RegionHealthSources_GetHealth_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1beta_generated_region_health_sources_get_health_sync.py" + }, { "canonical": true, "clientMethod": { @@ -64911,6 +65271,98 @@ ], "title": "compute_v1beta_generated_region_snapshots_test_iam_permissions_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1beta.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" + }, + "fullName": "google.cloud.compute_v1beta.RegionSnapshotsClient.update_kms_key", + "method": { + "fullName": "google.cloud.compute.v1beta.RegionSnapshots.UpdateKmsKey", + "service": { + "fullName": "google.cloud.compute.v1beta.RegionSnapshots", + "shortName": "RegionSnapshots" + }, + "shortName": "UpdateKmsKey" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1beta.types.UpdateKmsKeyRegionSnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "region_snapshot_update_kms_key_request_resource", + "type": "google.cloud.compute_v1beta.types.RegionSnapshotUpdateKmsKeyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_kms_key" + }, + "description": "Sample for UpdateKmsKey", + "file": "compute_v1beta_generated_region_snapshots_update_kms_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1beta_generated_RegionSnapshots_UpdateKmsKey_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1beta_generated_region_snapshots_update_kms_key_sync.py" + }, { "canonical": true, "clientMethod": { @@ -78695,6 +79147,94 @@ ], "title": "compute_v1beta_generated_snapshots_test_iam_permissions_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1beta.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1beta.SnapshotsClient.update_kms_key", + "method": { + "fullName": "google.cloud.compute.v1beta.Snapshots.UpdateKmsKey", + "service": { + "fullName": "google.cloud.compute.v1beta.Snapshots", + "shortName": "Snapshots" + }, + "shortName": 
"UpdateKmsKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1beta.types.UpdateKmsKeySnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "snapshot_update_kms_key_request_resource", + "type": "google.cloud.compute_v1beta.types.SnapshotUpdateKmsKeyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_kms_key" + }, + "description": "Sample for UpdateKmsKey", + "file": "compute_v1beta_generated_snapshots_update_kms_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1beta_generated_Snapshots_UpdateKmsKey_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1beta_generated_snapshots_update_kms_key_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_backend_services.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_backend_services.py index 7383043c9298..2f2707230311 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_backend_services.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_backend_services.py @@ -8027,6 +8027,7 @@ def test_insert_rest_call_success(request_type): "max_utilization": 0.1633, 
"orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "service": "service_value", "traffic_duration": "traffic_duration_value", } ], @@ -8744,6 +8745,7 @@ def test_patch_rest_call_success(request_type): "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "service": "service_value", "traffic_duration": "traffic_duration_value", } ], @@ -10155,6 +10157,7 @@ def test_update_rest_call_success(request_type): "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "service": "service_value", "traffic_duration": "traffic_duration_value", } ], diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_disks.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_disks.py index 6193769d092c..8ea0147e0177 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_disks.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_disks.py @@ -8033,6 +8033,432 @@ def test_update_unary_rest_flattened_error(transport: str = "rest"): ) +def test_update_kms_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_rest_required_fields( + request_type=compute.UpdateKmsKeyDiskRequest, +): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = "disk_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == "disk_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_rest_unset_required_fields(): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "disk", + "diskUpdateKmsKeyRequestResource", + "project", + "zone", + ) + ) + ) + + +def test_update_kms_key_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + disk="disk_value", + disk_update_kms_key_request_resource=compute.DiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/zones/{zone}/disks/{disk}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key( + compute.UpdateKmsKeyDiskRequest(), + project="project_value", + zone="zone_value", + disk="disk_value", + disk_update_kms_key_request_resource=compute.DiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_update_kms_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_unary_rest_required_fields( + request_type=compute.UpdateKmsKeyDiskRequest, +): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = "disk_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == "disk_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "disk", + "diskUpdateKmsKeyRequestResource", + "project", + "zone", + ) + ) + ) + + +def test_update_kms_key_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + disk="disk_value", + disk_update_kms_key_request_resource=compute.DiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/zones/{zone}/disks/{disk}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_unary_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_kms_key_unary( + compute.UpdateKmsKeyDiskRequest(), + project="project_value", + zone="zone_value", + disk="disk_value", + disk_update_kms_key_request_resource=compute.DiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.DisksRestTransport( @@ -12394,6 +12820,252 @@ def test_update_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_update_kms_key_rest_bad_request(request_type=compute.UpdateKmsKeyDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_kms_key(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateKmsKeyDiskRequest, + dict, + ], +) +def test_update_kms_key_rest_call_success(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init["disk_update_kms_key_request_resource"] = { + "kms_key_name": "kms_key_name_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateKmsKeyDiskRequest.meta.fields[ + "disk_update_kms_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disk_update_kms_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample 
request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["disk_update_kms_key_request_resource"][field]) + ): + del request_init["disk_update_kms_key_request_resource"][field][i][ + subfield + ] + else: + del request_init["disk_update_kms_key_request_resource"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = 
json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_kms_key(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_kms_key_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DisksRestInterceptor, "post_update_kms_key" + ) as 
post, + mock.patch.object( + transports.DisksRestInterceptor, "post_update_kms_key_with_metadata" + ) as post_with_metadata, + mock.patch.object(transports.DisksRestInterceptor, "pre_update_kms_key") as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateKmsKeyDiskRequest.pb( + compute.UpdateKmsKeyDiskRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateKmsKeyDiskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update_kms_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = DisksClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12793,6 +13465,26 @@ def test_update_unary_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_kms_key_unary_empty_call_rest(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_kms_key), "__call__") as call: + client.update_kms_key_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateKmsKeyDiskRequest() + + assert args[0] == request_msg + + def test_disks_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): @@ -12834,6 +13526,7 @@ def test_disks_base_transport(): "stop_group_async_replication", "test_iam_permissions", "update", + "update_kms_key", ) for method in methods: with pytest.raises(NotImplementedError): @@ -13034,6 +13727,9 @@ def test_disks_client_transport_session_collision(transport_name): session1 = client1.transport.update._session session2 = client2.transport.update._session assert session1 != session2 + session1 = client1.transport.update_kms_key._session + session2 = client2.transport.update_kms_key._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_future_reservations.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_future_reservations.py index c0f45302457f..3270abc78278 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_future_reservations.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_future_reservations.py @@ -4291,6 +4291,7 @@ def test_get_rest_call_success(request_type): return_value = compute.FutureReservation( auto_created_reservations_delete_time="auto_created_reservations_delete_time_value", auto_delete_auto_created_reservations=True, + confidential_compute_type="confidential_compute_type_value", creation_timestamp="creation_timestamp_value", deployment_type="deployment_type_value", description="description_value", @@ -4329,6 +4330,7 @@ def test_get_rest_call_success(request_type): == "auto_created_reservations_delete_time_value" ) assert response.auto_delete_auto_created_reservations is True + assert 
response.confidential_compute_type == "confidential_compute_type_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.deployment_type == "deployment_type_value" assert response.description == "description_value" @@ -4478,6 +4480,7 @@ def test_insert_rest_call_success(request_type): "commitment_plan": "commitment_plan_value", "previous_commitment_terms": "previous_commitment_terms_value", }, + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "deployment_type": "deployment_type_value", "description": "description_value", @@ -4486,6 +4489,7 @@ def test_insert_rest_call_success(request_type): "kind": "kind_value", "name": "name_value", "name_prefix": "name_prefix_value", + "params": {"resource_manager_tags": {}}, "planning_status": "planning_status_value", "protection_tier": "protection_tier_value", "reservation_mode": "reservation_mode_value", @@ -4975,6 +4979,7 @@ def test_update_rest_call_success(request_type): "commitment_plan": "commitment_plan_value", "previous_commitment_terms": "previous_commitment_terms_value", }, + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "deployment_type": "deployment_type_value", "description": "description_value", @@ -4983,6 +4988,7 @@ def test_update_rest_call_success(request_type): "kind": "kind_value", "name": "name_value", "name_prefix": "name_prefix_value", + "params": {"resource_manager_tags": {}}, "planning_status": "planning_status_value", "protection_tier": "protection_tier_value", "reservation_mode": "reservation_mode_value", diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instance_group_managers.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instance_group_managers.py index 9d62cdbb75f9..45a6257c2aec 100644 --- 
a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instance_group_managers.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instance_group_managers.py @@ -13808,7 +13808,7 @@ def test_configure_accelerator_topologies_rest_call_success(request_type): } request_init[ "instance_group_managers_configure_accelerator_topologies_request_resource" - ] = {"accelerator_topology_actions": {}} + ] = {"accelerator_topology_actions": {}, "accelerator_topology_configurations": {}} # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency # See https://github.com/googleapis/gapic-generator-python/issues/1748 diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instance_templates.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instance_templates.py index 1b020f82fbdf..f12ecc75fd56 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instance_templates.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instance_templates.py @@ -4151,6 +4151,7 @@ def test_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instances.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instances.py index c0fce2950c01..dc4f384dd0b5 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instances.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_instances.py @@ -23285,6 +23285,7 @@ def 
test_add_network_interface_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, @@ -24359,6 +24360,7 @@ def test_bulk_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, @@ -26738,6 +26740,7 @@ def test_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, @@ -33724,6 +33727,7 @@ def test_update_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, @@ -34616,6 +34620,7 @@ def test_update_network_interface_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_machine_images.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_machine_images.py index ca674741a54a..2c59ed0fb510 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_machine_images.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_machine_images.py @@ -4121,6 +4121,7 @@ def 
test_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_network_attachments.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_network_attachments.py index c13a0f91159d..51112874a57d 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_network_attachments.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_network_attachments.py @@ -4602,6 +4602,7 @@ def test_insert_rest_call_success(request_type): "secondary_ip_cidr_ranges_value1", "secondary_ip_cidr_ranges_value2", ], + "service_class_id": "service_class_id_value", "status": "status_value", "subnetwork": "subnetwork_value", "subnetwork_cidr_range": "subnetwork_cidr_range_value", @@ -5025,6 +5026,7 @@ def test_patch_rest_call_success(request_type): "secondary_ip_cidr_ranges_value1", "secondary_ip_cidr_ranges_value2", ], + "service_class_id": "service_class_id_value", "status": "status_value", "subnetwork": "subnetwork_value", "subnetwork_cidr_range": "subnetwork_cidr_range_value", diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_backend_services.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_backend_services.py index 208c8c445072..165685bf62be 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_backend_services.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_backend_services.py @@ -5796,6 +5796,7 @@ def test_insert_rest_call_success(request_type): "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": 
"preference_value", + "service": "service_value", "traffic_duration": "traffic_duration_value", } ], @@ -6524,6 +6525,7 @@ def test_patch_rest_call_success(request_type): "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "service": "service_value", "traffic_duration": "traffic_duration_value", } ], @@ -7702,6 +7704,7 @@ def test_update_rest_call_success(request_type): "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "service": "service_value", "traffic_duration": "traffic_duration_value", } ], diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_commitments.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_commitments.py index 18133777c558..c71b0f00c64f 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_commitments.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_commitments.py @@ -3907,6 +3907,7 @@ def test_insert_rest_call_success(request_type): "merge_source_commitments_value2", ], "name": "name_value", + "params": {"resource_manager_tags": {}}, "plan": "plan_value", "region": "region_value", "reservations": [ @@ -3931,6 +3932,7 @@ def test_insert_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, "delete_at_time": "delete_at_time_value", @@ -4645,6 +4647,7 @@ def test_update_rest_call_success(request_type): "merge_source_commitments_value2", ], "name": "name_value", + "params": {"resource_manager_tags": {}}, "plan": "plan_value", "region": "region_value", "reservations": [ @@ -4669,6 +4672,7 @@ def 
test_update_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, "delete_at_time": "delete_at_time_value", @@ -5040,6 +5044,7 @@ def test_update_reservations_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, "delete_at_time": "delete_at_time_value", diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_composite_health_checks.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_composite_health_checks.py index fdee60a0e91d..58e8ac2cf09d 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_composite_health_checks.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_composite_health_checks.py @@ -2132,6 +2132,209 @@ def test_get_rest_flattened_error(transport: str = "rest"): ) +def test_get_health_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_health in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = 
( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_health] = mock_rpc + + request = {} + client.get_health(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_health(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_health_rest_required_fields( + request_type=compute.GetHealthRegionCompositeHealthCheckRequest, +): + transport_class = transports.RegionCompositeHealthChecksRestTransport + + request_init = {} + request_init["composite_health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["compositeHealthCheck"] = "composite_health_check_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "compositeHealthCheck" in jsonified_request + assert jsonified_request["compositeHealthCheck"] == "composite_health_check_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + 
assert jsonified_request["region"] == "region_value" + + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.CompositeHealthCheckHealth() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.CompositeHealthCheckHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_health(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_health_rest_unset_required_fields(): + transport = transports.RegionCompositeHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_health._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "compositeHealthCheck", + "project", + "region", + ) + ) + ) + + +def test_get_health_rest_flattened(): + 
client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.CompositeHealthCheckHealth() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "composite_health_check": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + composite_health_check="composite_health_check_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.CompositeHealthCheckHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_health(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/regions/{region}/compositeHealthChecks/{composite_health_check}/getHealth" + % client.transport._host, + args[1], + ) + + +def test_get_health_rest_flattened_error(transport: str = "rest"): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_health( + compute.GetHealthRegionCompositeHealthCheckRequest(), + project="project_value", + region="region_value", + composite_health_check="composite_health_check_value", + ) + + def test_insert_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4033,6 +4236,149 @@ def test_get_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_get_health_rest_bad_request( + request_type=compute.GetHealthRegionCompositeHealthCheckRequest, +): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "composite_health_check": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_health(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetHealthRegionCompositeHealthCheckRequest, + dict, + ], +) +def test_get_health_rest_call_success(request_type): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "composite_health_check": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.CompositeHealthCheckHealth( + health_state="health_state_value", + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.CompositeHealthCheckHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_health(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.CompositeHealthCheckHealth) + assert response.health_state == "health_state_value" + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_health_rest_interceptors(null_interceptor): + transport = transports.RegionCompositeHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionCompositeHealthChecksRestInterceptor(), + ) + client = RegionCompositeHealthChecksClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionCompositeHealthChecksRestInterceptor, "post_get_health" + ) as post, + mock.patch.object( + transports.RegionCompositeHealthChecksRestInterceptor, + "post_get_health_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionCompositeHealthChecksRestInterceptor, "pre_get_health" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetHealthRegionCompositeHealthCheckRequest.pb( + compute.GetHealthRegionCompositeHealthCheckRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.CompositeHealthCheckHealth.to_json( + compute.CompositeHealthCheckHealth() + ) + req.return_value.content = return_value + + request = compute.GetHealthRegionCompositeHealthCheckRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.CompositeHealthCheckHealth() + post_with_metadata.return_value = 
compute.CompositeHealthCheckHealth(), metadata + + client.get_health( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_insert_rest_bad_request( request_type=compute.InsertRegionCompositeHealthCheckRequest, ): @@ -4984,6 +5330,26 @@ def test_get_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_health_empty_call_rest(): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_health), "__call__") as call: + client.get_health(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetHealthRegionCompositeHealthCheckRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_insert_unary_empty_call_rest(): @@ -5091,6 +5457,7 @@ def test_region_composite_health_checks_base_transport(): "aggregated_list", "delete", "get", + "get_health", "insert", "list", "patch", @@ -5249,6 +5616,9 @@ def test_region_composite_health_checks_client_transport_session_collision( session1 = client1.transport.get._session session2 = client2.transport.get._session assert session1 != session2 + session1 = client1.transport.get_health._session + session2 = client2.transport.get_health._session + assert session1 != session2 session1 = client1.transport.insert._session session2 = client2.transport.insert._session assert session1 != session2 diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_disks.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_disks.py index eff35bd15762..9fdb083637b9 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_disks.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_disks.py @@ -7343,6 +7343,432 @@ def test_update_unary_rest_flattened_error(transport: str = "rest"): ) +def test_update_kms_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_rest_required_fields( + request_type=compute.UpdateKmsKeyRegionDiskRequest, +): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = "disk_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == "disk_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "disk", + "project", + "region", + "regionDiskUpdateKmsKeyRequestResource", + ) + ) + ) + + +def test_update_kms_key_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + disk="disk_value", + region_disk_update_kms_key_request_resource=compute.RegionDiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/regions/{region}/disks/{disk}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key( + compute.UpdateKmsKeyRegionDiskRequest(), + project="project_value", + region="region_value", + disk="disk_value", + region_disk_update_kms_key_request_resource=compute.RegionDiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_update_kms_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_unary_rest_required_fields( + request_type=compute.UpdateKmsKeyRegionDiskRequest, +): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = "disk_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == "disk_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "disk", + "project", + "region", + "regionDiskUpdateKmsKeyRequestResource", + ) + ) + ) + + +def test_update_kms_key_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + disk="disk_value", + region_disk_update_kms_key_request_resource=compute.RegionDiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/regions/{region}/disks/{disk}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_unary_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key_unary( + compute.UpdateKmsKeyRegionDiskRequest(), + project="project_value", + region="region_value", + disk="disk_value", + region_disk_update_kms_key_request_resource=compute.RegionDiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.RegionDisksRestTransport( @@ -11389,6 +11815,263 @@ def test_update_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_update_kms_key_rest_bad_request( + request_type=compute.UpdateKmsKeyRegionDiskRequest, +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_kms_key(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateKmsKeyRegionDiskRequest, + dict, + ], +) +def test_update_kms_key_rest_call_success(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init["region_disk_update_kms_key_request_resource"] = { + "kms_key_name": "kms_key_name_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateKmsKeyRegionDiskRequest.meta.fields[ + "region_disk_update_kms_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disk_update_kms_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["region_disk_update_kms_key_request_resource"][ + 
field + ] + ), + ): + del request_init["region_disk_update_kms_key_request_resource"][ + field + ][i][subfield] + else: + del request_init["region_disk_update_kms_key_request_resource"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_kms_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_kms_key_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionDisksRestInterceptor, "post_update_kms_key" + ) as post, + mock.patch.object( + transports.RegionDisksRestInterceptor, "post_update_kms_key_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionDisksRestInterceptor, "pre_update_kms_key" + ) as 
pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateKmsKeyRegionDiskRequest.pb( + compute.UpdateKmsKeyRegionDiskRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateKmsKeyRegionDiskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update_kms_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = RegionDisksClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11748,6 +12431,26 @@ def test_update_unary_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_kms_key_unary_empty_call_rest(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_kms_key), "__call__") as call: + client.update_kms_key_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateKmsKeyRegionDiskRequest() + + assert args[0] == request_msg + + def test_region_disks_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): @@ -11787,6 +12490,7 @@ def test_region_disks_base_transport(): "stop_group_async_replication", "test_iam_permissions", "update", + "update_kms_key", ) for method in methods: with pytest.raises(NotImplementedError): @@ -11981,6 +12685,9 @@ def test_region_disks_client_transport_session_collision(transport_name): session1 = client1.transport.update._session session2 = client2.transport.update._session assert session1 != session2 + session1 = client1.transport.update_kms_key._session + session2 = client2.transport.update_kms_key._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_health_sources.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_health_sources.py index ac6305edb6fb..9101d6ed520c 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_health_sources.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_health_sources.py @@ -2089,6 +2089,209 @@ def test_get_rest_flattened_error(transport: str = "rest"): ) +def test_get_health_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has 
been cached + assert client._transport.get_health in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_health] = mock_rpc + + request = {} + client.get_health(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_health(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_health_rest_required_fields( + request_type=compute.GetHealthRegionHealthSourceRequest, +): + transport_class = transports.RegionHealthSourcesRestTransport + + request_init = {} + request_init["health_source"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthSource"] = "health_source_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthSource" in jsonified_request + assert jsonified_request["healthSource"] == "health_source_value" + assert 
"project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.HealthSourceHealth() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.HealthSourceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_health(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_health_rest_unset_required_fields(): + transport = transports.RegionHealthSourcesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_health._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + 
"healthSource", + "project", + "region", + ) + ) + ) + + +def test_get_health_rest_flattened(): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthSourceHealth() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_source": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + health_source="health_source_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.HealthSourceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_health(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/regions/{region}/healthSources/{health_source}/getHealth" + % client.transport._host, + args[1], + ) + + +def test_get_health_rest_flattened_error(transport: str = "rest"): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_health( + compute.GetHealthRegionHealthSourceRequest(), + project="project_value", + region="region_value", + health_source="health_source_value", + ) + + def test_insert_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3978,6 +4181,147 @@ def test_get_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_get_health_rest_bad_request( + request_type=compute.GetHealthRegionHealthSourceRequest, +): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_source": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_health(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetHealthRegionHealthSourceRequest, + dict, + ], +) +def test_get_health_rest_call_success(request_type): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_source": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthSourceHealth( + health_state="health_state_value", + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.HealthSourceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_health(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.HealthSourceHealth) + assert response.health_state == "health_state_value" + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_health_rest_interceptors(null_interceptor): + transport = transports.RegionHealthSourcesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionHealthSourcesRestInterceptor(), + ) + client = RegionHealthSourcesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionHealthSourcesRestInterceptor, "post_get_health" + ) as post, + mock.patch.object( + transports.RegionHealthSourcesRestInterceptor, + "post_get_health_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionHealthSourcesRestInterceptor, "pre_get_health" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetHealthRegionHealthSourceRequest.pb( + 
compute.GetHealthRegionHealthSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.HealthSourceHealth.to_json(compute.HealthSourceHealth()) + req.return_value.content = return_value + + request = compute.GetHealthRegionHealthSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.HealthSourceHealth() + post_with_metadata.return_value = compute.HealthSourceHealth(), metadata + + client.get_health( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_insert_rest_bad_request(request_type=compute.InsertRegionHealthSourceRequest): client = RegionHealthSourcesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -4908,6 +5252,26 @@ def test_get_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_health_empty_call_rest(): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_health), "__call__") as call: + client.get_health(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetHealthRegionHealthSourceRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. def test_insert_unary_empty_call_rest(): @@ -5015,6 +5379,7 @@ def test_region_health_sources_base_transport(): "aggregated_list", "delete", "get", + "get_health", "insert", "list", "patch", @@ -5171,6 +5536,9 @@ def test_region_health_sources_client_transport_session_collision(transport_name session1 = client1.transport.get._session session2 = client2.transport.get._session assert session1 != session2 + session1 = client1.transport.get_health._session + session2 = client2.transport.get_health._session + assert session1 != session2 session1 = client1.transport.insert._session session2 = client2.transport.insert._session assert session1 != session2 diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_instance_templates.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_instance_templates.py index 1cf3cd5d5cf1..39c018289bd3 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_instance_templates.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_instance_templates.py @@ -3101,6 +3101,7 @@ def test_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_instances.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_instances.py index 3739f6804256..ad0038e59576 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_instances.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_instances.py @@ -1836,6 +1836,7 @@ def 
test_bulk_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_snapshots.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_snapshots.py index 980d8fb752ac..769dda4217d8 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_snapshots.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_snapshots.py @@ -3517,6 +3517,440 @@ def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): ) +def test_update_kms_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_rest_required_fields( + request_type=compute.UpdateKmsKeyRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "regionSnapshotUpdateKmsKeyRequestResource", + "snapshot", + ) + ) + ) + + +def test_update_kms_key_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "snapshot": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot="snapshot_value", + region_snapshot_update_kms_key_request_resource=compute.RegionSnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/regions/{region}/snapshots/{snapshot}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key( + compute.UpdateKmsKeyRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot="snapshot_value", + region_snapshot_update_kms_key_request_resource=compute.RegionSnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_update_kms_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_unary_rest_required_fields( + request_type=compute.UpdateKmsKeyRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_unary_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "regionSnapshotUpdateKmsKeyRequestResource", + "snapshot", + ) + ) + ) + + +def test_update_kms_key_unary_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "snapshot": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot="snapshot_value", + region_snapshot_update_kms_key_request_resource=compute.RegionSnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/regions/{region}/snapshots/{snapshot}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_unary_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key_unary( + compute.UpdateKmsKeyRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot="snapshot_value", + region_snapshot_update_kms_key_request_resource=compute.RegionSnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.RegionSnapshotsRestTransport( @@ -5241,6 +5675,264 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_update_kms_key_rest_bad_request( + request_type=compute.UpdateKmsKeyRegionSnapshotRequest, +): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "snapshot": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_kms_key(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateKmsKeyRegionSnapshotRequest, + dict, + ], +) +def test_update_kms_key_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "snapshot": "sample3"} + request_init["region_snapshot_update_kms_key_request_resource"] = { + "kms_key_name": "kms_key_name_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateKmsKeyRegionSnapshotRequest.meta.fields[ + "region_snapshot_update_kms_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_snapshot_update_kms_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + 
request_init["region_snapshot_update_kms_key_request_resource"][ + field + ] + ), + ): + del request_init["region_snapshot_update_kms_key_request_resource"][ + field + ][i][subfield] + else: + del request_init["region_snapshot_update_kms_key_request_resource"][ + field + ][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_kms_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_kms_key_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_update_kms_key" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, + "post_update_kms_key_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, 
"pre_update_kms_key" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateKmsKeyRegionSnapshotRequest.pb( + compute.UpdateKmsKeyRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateKmsKeyRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update_kms_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = RegionSnapshotsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -5410,6 +6102,26 @@ def test_test_iam_permissions_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_kms_key_unary_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_kms_key), "__call__") as call: + client.update_kms_key_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateKmsKeyRegionSnapshotRequest() + + assert args[0] == request_msg + + def test_region_snapshots_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): @@ -5440,6 +6152,7 @@ def test_region_snapshots_base_transport(): "set_iam_policy", "set_labels", "test_iam_permissions", + "update_kms_key", ) for method in methods: with pytest.raises(NotImplementedError): @@ -5607,6 +6320,9 @@ def test_region_snapshots_client_transport_session_collision(transport_name): session1 = client1.transport.test_iam_permissions._session session2 = client2.transport.test_iam_permissions._session assert session1 != session2 + session1 = client1.transport.update_kms_key._session + session2 = client2.transport.update_kms_key._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_url_maps.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_url_maps.py index f501139013e1..93b3e46ff3a4 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_url_maps.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_region_url_maps.py @@ -4814,6 +4814,7 @@ def test_insert_rest_call_success(request_type): ], "region": "region_value", "self_link": "self_link_value", + "status": {"quota_usage": {"forwarding_rules": 1725, "units": 563}}, "tests": [ { "description": "description_value", @@ -5662,6 +5663,7 @@ def test_patch_rest_call_success(request_type): ], "region": "region_value", "self_link": "self_link_value", + "status": {"quota_usage": {"forwarding_rules": 1725, "units": 563}}, "tests": [ { "description": "description_value", @@ -6338,6 +6340,7 @@ def test_update_rest_call_success(request_type): ], 
"region": "region_value", "self_link": "self_link_value", + "status": {"quota_usage": {"forwarding_rules": 1725, "units": 563}}, "tests": [ { "description": "description_value", @@ -6807,6 +6810,7 @@ def test_validate_rest_call_success(request_type): ], "region": "region_value", "self_link": "self_link_value", + "status": {"quota_usage": {"forwarding_rules": 1725, "units": 563}}, "tests": [ { "description": "description_value", diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_reservations.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_reservations.py index 074bbafc2830..de029909ac22 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_reservations.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_reservations.py @@ -5117,6 +5117,7 @@ def test_get_rest_call_success(request_type): # Designate an appropriate value for the returned response. return_value = compute.Reservation( commitment="commitment_value", + confidential_compute_type="confidential_compute_type_value", creation_timestamp="creation_timestamp_value", delete_at_time="delete_at_time_value", deployment_type="deployment_type_value", @@ -5152,6 +5153,7 @@ def test_get_rest_call_success(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Reservation) assert response.commitment == "commitment_value" + assert response.confidential_compute_type == "confidential_compute_type_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.delete_at_time == "delete_at_time_value" assert response.deployment_type == "deployment_type_value" @@ -5423,6 +5425,7 @@ def test_insert_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, "delete_at_time": "delete_at_time_value", @@ -6863,6 +6866,7 @@ def test_update_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, "delete_at_time": "delete_at_time_value", diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_snapshots.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_snapshots.py index c23423a3a06a..257fa7696b77 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_snapshots.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_snapshots.py @@ -3623,6 +3623,418 @@ def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): ) +def test_update_kms_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on 
client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_rest_required_fields( + request_type=compute.UpdateKmsKeySnapshotRequest, +): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "snapshot", + "snapshotUpdateKmsKeyRequestResource", + ) + ) + ) + + +def test_update_kms_key_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "snapshot": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + snapshot="snapshot_value", + snapshot_update_kms_key_request_resource=compute.SnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/global/snapshots/{snapshot}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key( + compute.UpdateKmsKeySnapshotRequest(), + project="project_value", + snapshot="snapshot_value", + snapshot_update_kms_key_request_resource=compute.SnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_update_kms_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_unary_rest_required_fields( + request_type=compute.UpdateKmsKeySnapshotRequest, +): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_unary_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "snapshot", + "snapshotUpdateKmsKeyRequestResource", + ) + ) + ) + + +def test_update_kms_key_unary_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "snapshot": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + snapshot="snapshot_value", + snapshot_update_kms_key_request_resource=compute.SnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/beta/projects/{project}/global/snapshots/{snapshot}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_unary_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_kms_key_unary( + compute.UpdateKmsKeySnapshotRequest(), + project="project_value", + snapshot="snapshot_value", + snapshot_update_kms_key_request_resource=compute.SnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.SnapshotsRestTransport( @@ -5445,6 +5857,259 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_update_kms_key_rest_bad_request( + request_type=compute.UpdateKmsKeySnapshotRequest, +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "snapshot": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_kms_key(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateKmsKeySnapshotRequest, + dict, + ], +) +def test_update_kms_key_rest_call_success(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "snapshot": "sample2"} + request_init["snapshot_update_kms_key_request_resource"] = { + "kms_key_name": "kms_key_name_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateKmsKeySnapshotRequest.meta.fields[ + "snapshot_update_kms_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "snapshot_update_kms_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the 
sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["snapshot_update_kms_key_request_resource"][field] + ), + ): + del request_init["snapshot_update_kms_key_request_resource"][field][ + i + ][subfield] + else: + del request_init["snapshot_update_kms_key_request_resource"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + 
response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_kms_key(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_kms_key_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + 
transports.SnapshotsRestInterceptor, "post_update_kms_key" + ) as post, + mock.patch.object( + transports.SnapshotsRestInterceptor, "post_update_kms_key_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.SnapshotsRestInterceptor, "pre_update_kms_key" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateKmsKeySnapshotRequest.pb( + compute.UpdateKmsKeySnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateKmsKeySnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update_kms_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = SnapshotsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -5634,6 +6299,26 @@ def test_test_iam_permissions_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_kms_key_unary_empty_call_rest(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.update_kms_key), "__call__") as call: + client.update_kms_key_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateKmsKeySnapshotRequest() + + assert args[0] == request_msg + + def test_snapshots_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): @@ -5665,6 +6350,7 @@ def test_snapshots_base_transport(): "set_iam_policy", "set_labels", "test_iam_permissions", + "update_kms_key", ) for method in methods: with pytest.raises(NotImplementedError): @@ -5835,6 +6521,9 @@ def test_snapshots_client_transport_session_collision(transport_name): session1 = client1.transport.test_iam_permissions._session session2 = client2.transport.test_iam_permissions._session assert session1 != session2 + session1 = client1.transport.update_kms_key._session + session2 = client2.transport.update_kms_key._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_url_maps.py b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_url_maps.py index 29c62e5ae48a..0031f228873a 100644 --- a/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_url_maps.py +++ b/packages/google-cloud-compute-v1beta/tests/unit/gapic/compute_v1beta/test_url_maps.py @@ -5023,6 +5023,7 @@ def test_insert_rest_call_success(request_type): ], "region": "region_value", "self_link": "self_link_value", + "status": {"quota_usage": {"forwarding_rules": 1725, "units": 563}}, "tests": [ { "description": "description_value", @@ -5858,6 +5859,7 @@ def test_patch_rest_call_success(request_type): ], "region": "region_value", "self_link": "self_link_value", + "status": {"quota_usage": {"forwarding_rules": 1725, "units": 
563}}, "tests": [ { "description": "description_value", @@ -6525,6 +6527,7 @@ def test_update_rest_call_success(request_type): ], "region": "region_value", "self_link": "self_link_value", + "status": {"quota_usage": {"forwarding_rules": 1725, "units": 563}}, "tests": [ { "description": "description_value", @@ -6992,6 +6995,7 @@ def test_validate_rest_call_success(request_type): ], "region": "region_value", "self_link": "self_link_value", + "status": {"quota_usage": {"forwarding_rules": 1725, "units": 563}}, "tests": [ { "description": "description_value", diff --git a/packages/google-cloud-compute/docs/compute_v1/instant_snapshot_groups.rst b/packages/google-cloud-compute/docs/compute_v1/instant_snapshot_groups.rst new file mode 100644 index 000000000000..5296c0e3ca68 --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/instant_snapshot_groups.rst @@ -0,0 +1,10 @@ +InstantSnapshotGroups +--------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.instant_snapshot_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.instant_snapshot_groups.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/region_backend_buckets.rst b/packages/google-cloud-compute/docs/compute_v1/region_backend_buckets.rst new file mode 100644 index 000000000000..93d4a5051f12 --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/region_backend_buckets.rst @@ -0,0 +1,10 @@ +RegionBackendBuckets +-------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_backend_buckets + :members: + :inherited-members: + +.. 
automodule:: google.cloud.compute_v1.services.region_backend_buckets.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/region_instance_group_manager_resize_requests.rst b/packages/google-cloud-compute/docs/compute_v1/region_instance_group_manager_resize_requests.rst new file mode 100644 index 000000000000..022fa2658b7a --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/region_instance_group_manager_resize_requests.rst @@ -0,0 +1,10 @@ +RegionInstanceGroupManagerResizeRequests +---------------------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_instance_group_manager_resize_requests + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_instance_group_manager_resize_requests.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/region_instant_snapshot_groups.rst b/packages/google-cloud-compute/docs/compute_v1/region_instant_snapshot_groups.rst new file mode 100644 index 000000000000..649a5848904b --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/region_instant_snapshot_groups.rst @@ -0,0 +1,10 @@ +RegionInstantSnapshotGroups +--------------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_instant_snapshot_groups + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_instant_snapshot_groups.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/region_snapshot_settings.rst b/packages/google-cloud-compute/docs/compute_v1/region_snapshot_settings.rst new file mode 100644 index 000000000000..de41d9995318 --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/region_snapshot_settings.rst @@ -0,0 +1,6 @@ +RegionSnapshotSettings +---------------------------------------- + +.. 
automodule:: google.cloud.compute_v1.services.region_snapshot_settings + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/region_snapshots.rst b/packages/google-cloud-compute/docs/compute_v1/region_snapshots.rst new file mode 100644 index 000000000000..7e83ac909758 --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/region_snapshots.rst @@ -0,0 +1,10 @@ +RegionSnapshots +--------------------------------- + +.. automodule:: google.cloud.compute_v1.services.region_snapshots + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.region_snapshots.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/docs/compute_v1/services_.rst b/packages/google-cloud-compute/docs/compute_v1/services_.rst index 91460717f25f..099c91825ded 100644 --- a/packages/google-cloud-compute/docs/compute_v1/services_.rst +++ b/packages/google-cloud-compute/docs/compute_v1/services_.rst @@ -32,6 +32,7 @@ Services for Google Cloud Compute v1 API instances instance_settings_service instance_templates + instant_snapshot_groups instant_snapshots interconnect_attachment_groups interconnect_attachments @@ -59,6 +60,7 @@ Services for Google Cloud Compute v1 API public_advertised_prefixes public_delegated_prefixes region_autoscalers + region_backend_buckets region_backend_services region_commitments region_composite_health_checks @@ -68,10 +70,12 @@ Services for Google Cloud Compute v1 API region_health_checks region_health_check_services region_health_sources + region_instance_group_manager_resize_requests region_instance_group_managers region_instance_groups region_instances region_instance_templates + region_instant_snapshot_groups region_instant_snapshots region_network_endpoint_groups region_network_firewall_policies @@ -79,6 +83,8 @@ Services for Google Cloud Compute v1 API region_operations regions region_security_policies + region_snapshots + region_snapshot_settings 
region_ssl_certificates region_ssl_policies region_target_http_proxies @@ -116,3 +122,4 @@ Services for Google Cloud Compute v1 API wire_groups zone_operations zones + zone_vm_extension_policies diff --git a/packages/google-cloud-compute/docs/compute_v1/zone_vm_extension_policies.rst b/packages/google-cloud-compute/docs/compute_v1/zone_vm_extension_policies.rst new file mode 100644 index 000000000000..064da0369910 --- /dev/null +++ b/packages/google-cloud-compute/docs/compute_v1/zone_vm_extension_policies.rst @@ -0,0 +1,10 @@ +ZoneVmExtensionPolicies +----------------------------------------- + +.. automodule:: google.cloud.compute_v1.services.zone_vm_extension_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.compute_v1.services.zone_vm_extension_policies.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-compute/google/cloud/compute/__init__.py b/packages/google-cloud-compute/google/cloud/compute/__init__.py index eb5a04a52823..be12306e4122 100644 --- a/packages/google-cloud-compute/google/cloud/compute/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute/__init__.py @@ -83,6 +83,9 @@ InstanceTemplatesClient, ) from google.cloud.compute_v1.services.instances.client import InstancesClient +from google.cloud.compute_v1.services.instant_snapshot_groups.client import ( + InstantSnapshotGroupsClient, +) from google.cloud.compute_v1.services.instant_snapshots.client import ( InstantSnapshotsClient, ) @@ -144,6 +147,9 @@ from google.cloud.compute_v1.services.region_autoscalers.client import ( RegionAutoscalersClient, ) +from google.cloud.compute_v1.services.region_backend_buckets.client import ( + RegionBackendBucketsClient, +) from google.cloud.compute_v1.services.region_backend_services.client import ( RegionBackendServicesClient, ) @@ -169,6 +175,9 @@ from google.cloud.compute_v1.services.region_health_sources.client import ( RegionHealthSourcesClient, ) +from 
google.cloud.compute_v1.services.region_instance_group_manager_resize_requests.client import ( + RegionInstanceGroupManagerResizeRequestsClient, +) from google.cloud.compute_v1.services.region_instance_group_managers.client import ( RegionInstanceGroupManagersClient, ) @@ -181,6 +190,9 @@ from google.cloud.compute_v1.services.region_instances.client import ( RegionInstancesClient, ) +from google.cloud.compute_v1.services.region_instant_snapshot_groups.client import ( + RegionInstantSnapshotGroupsClient, +) from google.cloud.compute_v1.services.region_instant_snapshots.client import ( RegionInstantSnapshotsClient, ) @@ -199,6 +211,12 @@ from google.cloud.compute_v1.services.region_security_policies.client import ( RegionSecurityPoliciesClient, ) +from google.cloud.compute_v1.services.region_snapshot_settings.client import ( + RegionSnapshotSettingsClient, +) +from google.cloud.compute_v1.services.region_snapshots.client import ( + RegionSnapshotsClient, +) from google.cloud.compute_v1.services.region_ssl_certificates.client import ( RegionSslCertificatesClient, ) @@ -278,6 +296,9 @@ from google.cloud.compute_v1.services.vpn_tunnels.client import VpnTunnelsClient from google.cloud.compute_v1.services.wire_groups.client import WireGroupsClient from google.cloud.compute_v1.services.zone_operations.client import ZoneOperationsClient +from google.cloud.compute_v1.services.zone_vm_extension_policies.client import ( + ZoneVmExtensionPoliciesClient, +) from google.cloud.compute_v1.services.zones.client import ZonesClient from google.cloud.compute_v1.types.compute import ( AbandonInstancesInstanceGroupManagerRequest, @@ -323,6 +344,7 @@ AggregatedListAcceleratorTypesRequest, AggregatedListAddressesRequest, AggregatedListAutoscalersRequest, + AggregatedListBackendBucketsRequest, AggregatedListBackendServicesRequest, AggregatedListDisksRequest, AggregatedListDiskTypesRequest, @@ -410,12 +432,15 @@ Backend, BackendBackendOrchestrationInfo, BackendBucket, + 
BackendBucketAggregatedList, BackendBucketCdnPolicy, BackendBucketCdnPolicyBypassCacheOnRequestHeader, BackendBucketCdnPolicyCacheKeyPolicy, BackendBucketCdnPolicyNegativeCachingPolicy, BackendBucketList, + BackendBucketListUsable, BackendBucketParams, + BackendBucketsScopedList, BackendBucketUsedBy, BackendCustomMetric, BackendService, @@ -468,12 +493,16 @@ BundledLocalSsds, CacheInvalidationRule, CacheKeyPolicy, + CachePolicy, + CachePolicyCacheKeyPolicy, + CachePolicyNegativeCachingPolicy, CalendarModeAdviceRequest, CalendarModeAdviceResponse, CalendarModeAdviceRpcRequest, CalendarModeRecommendation, CancelFutureReservationRequest, CancelInstanceGroupManagerResizeRequestRequest, + CancelRegionInstanceGroupManagerResizeRequestRequest, CircuitBreakers, CloneRulesFirewallPolicyRequest, CloneRulesNetworkFirewallPolicyRequest, @@ -481,11 +510,14 @@ Commitment, CommitmentAggregatedList, CommitmentList, + CommitmentParams, CommitmentResourceStatus, CommitmentsScopedList, CompositeHealthCheck, CompositeHealthCheckAggregatedList, + CompositeHealthCheckHealth, CompositeHealthCheckList, + CompositeHealthChecksGetHealthResponseHealthSourceHealth, CompositeHealthChecksScopedList, ConfidentialInstanceConfig, ConnectionDraining, @@ -535,6 +567,7 @@ DeleteInstancesInstanceGroupManagerRequest, DeleteInstancesRegionInstanceGroupManagerRequest, DeleteInstanceTemplateRequest, + DeleteInstantSnapshotGroupRequest, DeleteInstantSnapshotRequest, DeleteInterconnectAttachmentGroupRequest, DeleteInterconnectAttachmentRequest, @@ -558,6 +591,7 @@ DeletePublicAdvertisedPrefixeRequest, DeletePublicDelegatedPrefixeRequest, DeleteRegionAutoscalerRequest, + DeleteRegionBackendBucketRequest, DeleteRegionBackendServiceRequest, DeleteRegionCompositeHealthCheckRequest, DeleteRegionDiskRequest, @@ -566,7 +600,9 @@ DeleteRegionHealthCheckServiceRequest, DeleteRegionHealthSourceRequest, DeleteRegionInstanceGroupManagerRequest, + DeleteRegionInstanceGroupManagerResizeRequestRequest, 
DeleteRegionInstanceTemplateRequest, + DeleteRegionInstantSnapshotGroupRequest, DeleteRegionInstantSnapshotRequest, DeleteRegionNetworkEndpointGroupRequest, DeleteRegionNetworkFirewallPolicyRequest, @@ -574,6 +610,7 @@ DeleteRegionOperationRequest, DeleteRegionOperationResponse, DeleteRegionSecurityPolicyRequest, + DeleteRegionSnapshotRequest, DeleteRegionSslCertificateRequest, DeleteRegionSslPolicyRequest, DeleteRegionTargetHttpProxyRequest, @@ -608,6 +645,7 @@ DeleteWireGroupRequest, DeleteZoneOperationRequest, DeleteZoneOperationResponse, + DeleteZoneVmExtensionPolicyRequest, Denied, DeprecateImageRequest, DeprecationStatus, @@ -637,6 +675,7 @@ DiskTypeAggregatedList, DiskTypeList, DiskTypesScopedList, + DiskUpdateKmsKeyRequest, DisplayDevice, DistributionPolicy, DistributionPolicyZoneConfiguration, @@ -679,6 +718,7 @@ ForwardingRulesScopedList, FutureReservation, FutureReservationCommitmentInfo, + FutureReservationParams, FutureReservationsAggregatedListResponse, FutureReservationsListResponse, FutureReservationSpecificSKUProperties, @@ -732,6 +772,8 @@ GetHealthBackendServiceRequest, GetHealthCheckRequest, GetHealthRegionBackendServiceRequest, + GetHealthRegionCompositeHealthCheckRequest, + GetHealthRegionHealthSourceRequest, GetHealthTargetPoolRequest, GetIamPolicyBackendBucketRequest, GetIamPolicyBackendServiceRequest, @@ -740,6 +782,7 @@ GetIamPolicyImageRequest, GetIamPolicyInstanceRequest, GetIamPolicyInstanceTemplateRequest, + GetIamPolicyInstantSnapshotGroupRequest, GetIamPolicyInstantSnapshotRequest, GetIamPolicyInterconnectAttachmentGroupRequest, GetIamPolicyInterconnectGroupRequest, @@ -749,10 +792,13 @@ GetIamPolicyNetworkFirewallPolicyRequest, GetIamPolicyNodeGroupRequest, GetIamPolicyNodeTemplateRequest, + GetIamPolicyRegionBackendBucketRequest, GetIamPolicyRegionBackendServiceRequest, GetIamPolicyRegionDiskRequest, + GetIamPolicyRegionInstantSnapshotGroupRequest, GetIamPolicyRegionInstantSnapshotRequest, 
GetIamPolicyRegionNetworkFirewallPolicyRequest, + GetIamPolicyRegionSnapshotRequest, GetIamPolicyReservationBlockRequest, GetIamPolicyReservationRequest, GetIamPolicyReservationSubBlockRequest, @@ -769,6 +815,7 @@ GetInstanceRequest, GetInstanceSettingRequest, GetInstanceTemplateRequest, + GetInstantSnapshotGroupRequest, GetInstantSnapshotRequest, GetInterconnectAttachmentGroupRequest, GetInterconnectAttachmentRequest, @@ -802,6 +849,7 @@ GetPublicAdvertisedPrefixeRequest, GetPublicDelegatedPrefixeRequest, GetRegionAutoscalerRequest, + GetRegionBackendBucketRequest, GetRegionBackendServiceRequest, GetRegionCommitmentRequest, GetRegionCompositeHealthCheckRequest, @@ -812,8 +860,10 @@ GetRegionHealthCheckServiceRequest, GetRegionHealthSourceRequest, GetRegionInstanceGroupManagerRequest, + GetRegionInstanceGroupManagerResizeRequestRequest, GetRegionInstanceGroupRequest, GetRegionInstanceTemplateRequest, + GetRegionInstantSnapshotGroupRequest, GetRegionInstantSnapshotRequest, GetRegionNetworkEndpointGroupRequest, GetRegionNetworkFirewallPolicyRequest, @@ -821,6 +871,8 @@ GetRegionOperationRequest, GetRegionRequest, GetRegionSecurityPolicyRequest, + GetRegionSnapshotRequest, + GetRegionSnapshotSettingRequest, GetRegionSslCertificateRequest, GetRegionSslPolicyRequest, GetRegionTargetHttpProxyRequest, @@ -875,6 +927,7 @@ GetXpnResourcesProjectsRequest, GetZoneOperationRequest, GetZoneRequest, + GetZoneVmExtensionPolicyRequest, GlobalAddressesMoveRequest, GlobalNetworkEndpointGroupsAttachEndpointsRequest, GlobalNetworkEndpointGroupsDetachEndpointsRequest, @@ -905,7 +958,10 @@ HealthChecksScopedList, HealthSource, HealthSourceAggregatedList, + HealthSourceHealth, HealthSourceList, + HealthSourcesGetHealthResponseSourceInfo, + HealthSourcesGetHealthResponseSourceInfoBackendInfo, HealthSourcesScopedList, HealthStatus, HealthStatusForNetworkEndpoint, @@ -954,6 +1010,7 @@ InsertInstanceGroupRequest, InsertInstanceRequest, InsertInstanceTemplateRequest, + 
InsertInstantSnapshotGroupRequest, InsertInstantSnapshotRequest, InsertInterconnectAttachmentGroupRequest, InsertInterconnectAttachmentRequest, @@ -973,6 +1030,7 @@ InsertPublicAdvertisedPrefixeRequest, InsertPublicDelegatedPrefixeRequest, InsertRegionAutoscalerRequest, + InsertRegionBackendBucketRequest, InsertRegionBackendServiceRequest, InsertRegionCommitmentRequest, InsertRegionCompositeHealthCheckRequest, @@ -982,12 +1040,15 @@ InsertRegionHealthCheckServiceRequest, InsertRegionHealthSourceRequest, InsertRegionInstanceGroupManagerRequest, + InsertRegionInstanceGroupManagerResizeRequestRequest, InsertRegionInstanceTemplateRequest, + InsertRegionInstantSnapshotGroupRequest, InsertRegionInstantSnapshotRequest, InsertRegionNetworkEndpointGroupRequest, InsertRegionNetworkFirewallPolicyRequest, InsertRegionNotificationEndpointRequest, InsertRegionSecurityPolicyRequest, + InsertRegionSnapshotRequest, InsertRegionSslCertificateRequest, InsertRegionSslPolicyRequest, InsertRegionTargetHttpProxyRequest, @@ -1017,6 +1078,7 @@ InsertVpnGatewayRequest, InsertVpnTunnelRequest, InsertWireGroupRequest, + InsertZoneVmExtensionPolicyRequest, Instance, InstanceAggregatedList, InstanceConsumptionData, @@ -1064,6 +1126,7 @@ InstanceGroupManagerStatusAllInstancesConfig, InstanceGroupManagerStatusBulkInstanceOperation, InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck, + InstanceGroupManagerStatusInstanceStatusSummary, InstanceGroupManagerStatusStateful, InstanceGroupManagerStatusStatefulPerInstanceConfigs, InstanceGroupManagerStatusVersionTarget, @@ -1112,6 +1175,10 @@ InstanceWithNamedPorts, InstantSnapshot, InstantSnapshotAggregatedList, + InstantSnapshotGroup, + InstantSnapshotGroupParameters, + InstantSnapshotGroupResourceStatus, + InstantSnapshotGroupSourceInfo, InstantSnapshotList, InstantSnapshotParams, InstantSnapshotResourceStatus, @@ -1237,6 +1304,8 @@ ListInstancesRegionInstanceGroupsRequest, ListInstancesRequest, ListInstanceTemplatesRequest, + 
ListInstantSnapshotGroups, + ListInstantSnapshotGroupsRequest, ListInstantSnapshotsRequest, ListInterconnectAttachmentGroupsRequest, ListInterconnectAttachmentsRequest, @@ -1273,6 +1342,7 @@ ListPublicDelegatedPrefixesRequest, ListReferrersInstancesRequest, ListRegionAutoscalersRequest, + ListRegionBackendBucketsRequest, ListRegionBackendServicesRequest, ListRegionCommitmentsRequest, ListRegionCompositeHealthChecksRequest, @@ -1282,15 +1352,18 @@ ListRegionHealthCheckServicesRequest, ListRegionHealthChecksRequest, ListRegionHealthSourcesRequest, + ListRegionInstanceGroupManagerResizeRequestsRequest, ListRegionInstanceGroupManagersRequest, ListRegionInstanceGroupsRequest, ListRegionInstanceTemplatesRequest, + ListRegionInstantSnapshotGroupsRequest, ListRegionInstantSnapshotsRequest, ListRegionNetworkEndpointGroupsRequest, ListRegionNetworkFirewallPoliciesRequest, ListRegionNotificationEndpointsRequest, ListRegionOperationsRequest, ListRegionSecurityPoliciesRequest, + ListRegionSnapshotsRequest, ListRegionsRequest, ListRegionSslCertificatesRequest, ListRegionSslPoliciesRequest, @@ -1324,7 +1397,9 @@ ListTargetTcpProxiesRequest, ListTargetVpnGatewaysRequest, ListUrlMapsRequest, + ListUsableBackendBucketsRequest, ListUsableBackendServicesRequest, + ListUsableRegionBackendBucketsRequest, ListUsableRegionBackendServicesRequest, ListUsableSubnetworksRequest, ListVpnGatewaysRequest, @@ -1333,6 +1408,7 @@ ListXpnHostsProjectsRequest, ListZoneOperationsRequest, ListZonesRequest, + ListZoneVmExtensionPoliciesRequest, LocalDisk, LocalizedMessage, LocationPolicy, @@ -1484,6 +1560,7 @@ PatchPublicAdvertisedPrefixeRequest, PatchPublicDelegatedPrefixeRequest, PatchRegionAutoscalerRequest, + PatchRegionBackendBucketRequest, PatchRegionBackendServiceRequest, PatchRegionCompositeHealthCheckRequest, PatchRegionHealthAggregationPolicyRequest, @@ -1493,6 +1570,7 @@ PatchRegionInstanceGroupManagerRequest, PatchRegionNetworkFirewallPolicyRequest, PatchRegionSecurityPolicyRequest, + 
PatchRegionSnapshotSettingRequest, PatchRegionSslPolicyRequest, PatchRegionTargetHttpsProxyRequest, PatchRegionUrlMapRequest, @@ -1566,10 +1644,12 @@ RegionDisksResizeRequest, RegionDisksStartAsyncReplicationRequest, RegionDiskTypeList, + RegionDiskUpdateKmsKeyRequest, RegionInstanceGroupList, RegionInstanceGroupManagerDeleteInstanceConfigReq, RegionInstanceGroupManagerList, RegionInstanceGroupManagerPatchInstanceConfigReq, + RegionInstanceGroupManagerResizeRequestsListResponse, RegionInstanceGroupManagersAbandonInstancesRequest, RegionInstanceGroupManagersApplyUpdatesRequest, RegionInstanceGroupManagersCreateInstancesRequest, @@ -1595,6 +1675,7 @@ RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, RegionSetLabelsRequest, RegionSetPolicyRequest, + RegionSnapshotUpdateKmsKeyRequest, RegionTargetHttpsProxiesSetSslCertificatesRequest, RegionUrlMapsValidateRequest, RemoveAssociationFirewallPolicyRequest, @@ -1791,6 +1872,7 @@ SetIamPolicyImageRequest, SetIamPolicyInstanceRequest, SetIamPolicyInstanceTemplateRequest, + SetIamPolicyInstantSnapshotGroupRequest, SetIamPolicyInstantSnapshotRequest, SetIamPolicyInterconnectAttachmentGroupRequest, SetIamPolicyInterconnectGroupRequest, @@ -1800,10 +1882,13 @@ SetIamPolicyNetworkFirewallPolicyRequest, SetIamPolicyNodeGroupRequest, SetIamPolicyNodeTemplateRequest, + SetIamPolicyRegionBackendBucketRequest, SetIamPolicyRegionBackendServiceRequest, SetIamPolicyRegionDiskRequest, + SetIamPolicyRegionInstantSnapshotGroupRequest, SetIamPolicyRegionInstantSnapshotRequest, SetIamPolicyRegionNetworkFirewallPolicyRequest, + SetIamPolicyRegionSnapshotRequest, SetIamPolicyReservationBlockRequest, SetIamPolicyReservationRequest, SetIamPolicyReservationSubBlockRequest, @@ -1829,6 +1914,7 @@ SetLabelsRegionDiskRequest, SetLabelsRegionInstantSnapshotRequest, SetLabelsRegionSecurityPolicyRequest, + SetLabelsRegionSnapshotRequest, SetLabelsSecurityPolicyRequest, SetLabelsSnapshotRequest, 
SetLabelsTargetVpnGatewayRequest, @@ -1879,11 +1965,15 @@ SimulateMaintenanceEventInstanceRequest, SimulateMaintenanceEventNodeGroupRequest, Snapshot, + SnapshotGroupParameters, SnapshotList, SnapshotParams, SnapshotSettings, + SnapshotSettingsAccessLocation, + SnapshotSettingsAccessLocationAccessLocationPreference, SnapshotSettingsStorageLocationSettings, SnapshotSettingsStorageLocationSettingsStorageLocationPreference, + SnapshotUpdateKmsKeyRequest, SourceDiskEncryptionKey, SourceInstanceParams, SourceInstanceProperties, @@ -2010,6 +2100,7 @@ TestIamPermissionsInstanceGroupRequest, TestIamPermissionsInstanceRequest, TestIamPermissionsInstanceTemplateRequest, + TestIamPermissionsInstantSnapshotGroupRequest, TestIamPermissionsInstantSnapshotRequest, TestIamPermissionsInterconnectAttachmentGroupRequest, TestIamPermissionsInterconnectGroupRequest, @@ -2023,6 +2114,7 @@ TestIamPermissionsNodeTemplateRequest, TestIamPermissionsPacketMirroringRequest, TestIamPermissionsRegionAutoscalerRequest, + TestIamPermissionsRegionBackendBucketRequest, TestIamPermissionsRegionBackendServiceRequest, TestIamPermissionsRegionCompositeHealthCheckRequest, TestIamPermissionsRegionDiskRequest, @@ -2031,9 +2123,11 @@ TestIamPermissionsRegionHealthCheckServiceRequest, TestIamPermissionsRegionHealthSourceRequest, TestIamPermissionsRegionInstanceGroupRequest, + TestIamPermissionsRegionInstantSnapshotGroupRequest, TestIamPermissionsRegionInstantSnapshotRequest, TestIamPermissionsRegionNetworkFirewallPolicyRequest, TestIamPermissionsRegionNotificationEndpointRequest, + TestIamPermissionsRegionSnapshotRequest, TestIamPermissionsReservationBlockRequest, TestIamPermissionsReservationRequest, TestIamPermissionsReservationSubBlockRequest, @@ -2063,6 +2157,10 @@ UpdateFutureReservationRequest, UpdateHealthCheckRequest, UpdateInstanceRequest, + UpdateKmsKeyDiskRequest, + UpdateKmsKeyRegionDiskRequest, + UpdateKmsKeyRegionSnapshotRequest, + UpdateKmsKeySnapshotRequest, UpdateLicenseRequest, 
UpdateNetworkInterfaceInstanceRequest, UpdatePeeringNetworkRequest, @@ -2082,6 +2180,7 @@ UpdateShieldedInstanceConfigInstanceRequest, UpdateStoragePoolRequest, UpdateUrlMapRequest, + UpdateZoneVmExtensionPolicyRequest, UrlMap, UrlMapList, UrlMapReference, @@ -2103,6 +2202,11 @@ VmEndpointNatMappingsInterfaceNatMappings, VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings, VmEndpointNatMappingsList, + VmExtensionPolicy, + VmExtensionPolicyExtensionPolicy, + VmExtensionPolicyInstanceSelector, + VmExtensionPolicyLabelSelector, + VmExtensionPolicyList, VpnGateway, VpnGatewayAggregatedList, VpnGatewayList, @@ -2181,6 +2285,7 @@ "InstancesClient", "InstanceSettingsServiceClient", "InstanceTemplatesClient", + "InstantSnapshotGroupsClient", "InstantSnapshotsClient", "InterconnectAttachmentGroupsClient", "InterconnectAttachmentsClient", @@ -2208,6 +2313,7 @@ "PublicAdvertisedPrefixesClient", "PublicDelegatedPrefixesClient", "RegionAutoscalersClient", + "RegionBackendBucketsClient", "RegionBackendServicesClient", "RegionCommitmentsClient", "RegionCompositeHealthChecksClient", @@ -2217,10 +2323,12 @@ "RegionHealthChecksClient", "RegionHealthCheckServicesClient", "RegionHealthSourcesClient", + "RegionInstanceGroupManagerResizeRequestsClient", "RegionInstanceGroupManagersClient", "RegionInstanceGroupsClient", "RegionInstancesClient", "RegionInstanceTemplatesClient", + "RegionInstantSnapshotGroupsClient", "RegionInstantSnapshotsClient", "RegionNetworkEndpointGroupsClient", "RegionNetworkFirewallPoliciesClient", @@ -2228,6 +2336,8 @@ "RegionOperationsClient", "RegionsClient", "RegionSecurityPoliciesClient", + "RegionSnapshotsClient", + "RegionSnapshotSettingsClient", "RegionSslCertificatesClient", "RegionSslPoliciesClient", "RegionTargetHttpProxiesClient", @@ -2265,6 +2375,7 @@ "WireGroupsClient", "ZoneOperationsClient", "ZonesClient", + "ZoneVmExtensionPoliciesClient", "AbandonInstancesInstanceGroupManagerRequest", "AbandonInstancesRegionInstanceGroupManagerRequest", 
"AcceleratorConfig", @@ -2308,6 +2419,7 @@ "AggregatedListAcceleratorTypesRequest", "AggregatedListAddressesRequest", "AggregatedListAutoscalersRequest", + "AggregatedListBackendBucketsRequest", "AggregatedListBackendServicesRequest", "AggregatedListDisksRequest", "AggregatedListDiskTypesRequest", @@ -2395,12 +2507,15 @@ "Backend", "BackendBackendOrchestrationInfo", "BackendBucket", + "BackendBucketAggregatedList", "BackendBucketCdnPolicy", "BackendBucketCdnPolicyBypassCacheOnRequestHeader", "BackendBucketCdnPolicyCacheKeyPolicy", "BackendBucketCdnPolicyNegativeCachingPolicy", "BackendBucketList", + "BackendBucketListUsable", "BackendBucketParams", + "BackendBucketsScopedList", "BackendBucketUsedBy", "BackendCustomMetric", "BackendService", @@ -2453,12 +2568,16 @@ "BundledLocalSsds", "CacheInvalidationRule", "CacheKeyPolicy", + "CachePolicy", + "CachePolicyCacheKeyPolicy", + "CachePolicyNegativeCachingPolicy", "CalendarModeAdviceRequest", "CalendarModeAdviceResponse", "CalendarModeAdviceRpcRequest", "CalendarModeRecommendation", "CancelFutureReservationRequest", "CancelInstanceGroupManagerResizeRequestRequest", + "CancelRegionInstanceGroupManagerResizeRequestRequest", "CircuitBreakers", "CloneRulesFirewallPolicyRequest", "CloneRulesNetworkFirewallPolicyRequest", @@ -2466,11 +2585,14 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentParams", "CommitmentResourceStatus", "CommitmentsScopedList", "CompositeHealthCheck", "CompositeHealthCheckAggregatedList", + "CompositeHealthCheckHealth", "CompositeHealthCheckList", + "CompositeHealthChecksGetHealthResponseHealthSourceHealth", "CompositeHealthChecksScopedList", "ConfidentialInstanceConfig", "ConnectionDraining", @@ -2520,6 +2642,7 @@ "DeleteInstancesInstanceGroupManagerRequest", "DeleteInstancesRegionInstanceGroupManagerRequest", "DeleteInstanceTemplateRequest", + "DeleteInstantSnapshotGroupRequest", "DeleteInstantSnapshotRequest", "DeleteInterconnectAttachmentGroupRequest", 
"DeleteInterconnectAttachmentRequest", @@ -2543,6 +2666,7 @@ "DeletePublicAdvertisedPrefixeRequest", "DeletePublicDelegatedPrefixeRequest", "DeleteRegionAutoscalerRequest", + "DeleteRegionBackendBucketRequest", "DeleteRegionBackendServiceRequest", "DeleteRegionCompositeHealthCheckRequest", "DeleteRegionDiskRequest", @@ -2551,7 +2675,9 @@ "DeleteRegionHealthCheckServiceRequest", "DeleteRegionHealthSourceRequest", "DeleteRegionInstanceGroupManagerRequest", + "DeleteRegionInstanceGroupManagerResizeRequestRequest", "DeleteRegionInstanceTemplateRequest", + "DeleteRegionInstantSnapshotGroupRequest", "DeleteRegionInstantSnapshotRequest", "DeleteRegionNetworkEndpointGroupRequest", "DeleteRegionNetworkFirewallPolicyRequest", @@ -2559,6 +2685,7 @@ "DeleteRegionOperationRequest", "DeleteRegionOperationResponse", "DeleteRegionSecurityPolicyRequest", + "DeleteRegionSnapshotRequest", "DeleteRegionSslCertificateRequest", "DeleteRegionSslPolicyRequest", "DeleteRegionTargetHttpProxyRequest", @@ -2593,6 +2720,7 @@ "DeleteWireGroupRequest", "DeleteZoneOperationRequest", "DeleteZoneOperationResponse", + "DeleteZoneVmExtensionPolicyRequest", "Denied", "DeprecateImageRequest", "DeprecationStatus", @@ -2622,6 +2750,7 @@ "DiskTypeAggregatedList", "DiskTypeList", "DiskTypesScopedList", + "DiskUpdateKmsKeyRequest", "DisplayDevice", "DistributionPolicy", "DistributionPolicyZoneConfiguration", @@ -2664,6 +2793,7 @@ "ForwardingRulesScopedList", "FutureReservation", "FutureReservationCommitmentInfo", + "FutureReservationParams", "FutureReservationsAggregatedListResponse", "FutureReservationsListResponse", "FutureReservationSpecificSKUProperties", @@ -2717,6 +2847,8 @@ "GetHealthBackendServiceRequest", "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", + "GetHealthRegionCompositeHealthCheckRequest", + "GetHealthRegionHealthSourceRequest", "GetHealthTargetPoolRequest", "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", @@ -2725,6 +2857,7 @@ 
"GetIamPolicyImageRequest", "GetIamPolicyInstanceRequest", "GetIamPolicyInstanceTemplateRequest", + "GetIamPolicyInstantSnapshotGroupRequest", "GetIamPolicyInstantSnapshotRequest", "GetIamPolicyInterconnectAttachmentGroupRequest", "GetIamPolicyInterconnectGroupRequest", @@ -2734,10 +2867,13 @@ "GetIamPolicyNetworkFirewallPolicyRequest", "GetIamPolicyNodeGroupRequest", "GetIamPolicyNodeTemplateRequest", + "GetIamPolicyRegionBackendBucketRequest", "GetIamPolicyRegionBackendServiceRequest", "GetIamPolicyRegionDiskRequest", + "GetIamPolicyRegionInstantSnapshotGroupRequest", "GetIamPolicyRegionInstantSnapshotRequest", "GetIamPolicyRegionNetworkFirewallPolicyRequest", + "GetIamPolicyRegionSnapshotRequest", "GetIamPolicyReservationBlockRequest", "GetIamPolicyReservationRequest", "GetIamPolicyReservationSubBlockRequest", @@ -2754,6 +2890,7 @@ "GetInstanceRequest", "GetInstanceSettingRequest", "GetInstanceTemplateRequest", + "GetInstantSnapshotGroupRequest", "GetInstantSnapshotRequest", "GetInterconnectAttachmentGroupRequest", "GetInterconnectAttachmentRequest", @@ -2787,6 +2924,7 @@ "GetPublicAdvertisedPrefixeRequest", "GetPublicDelegatedPrefixeRequest", "GetRegionAutoscalerRequest", + "GetRegionBackendBucketRequest", "GetRegionBackendServiceRequest", "GetRegionCommitmentRequest", "GetRegionCompositeHealthCheckRequest", @@ -2797,8 +2935,10 @@ "GetRegionHealthCheckServiceRequest", "GetRegionHealthSourceRequest", "GetRegionInstanceGroupManagerRequest", + "GetRegionInstanceGroupManagerResizeRequestRequest", "GetRegionInstanceGroupRequest", "GetRegionInstanceTemplateRequest", + "GetRegionInstantSnapshotGroupRequest", "GetRegionInstantSnapshotRequest", "GetRegionNetworkEndpointGroupRequest", "GetRegionNetworkFirewallPolicyRequest", @@ -2806,6 +2946,8 @@ "GetRegionOperationRequest", "GetRegionRequest", "GetRegionSecurityPolicyRequest", + "GetRegionSnapshotRequest", + "GetRegionSnapshotSettingRequest", "GetRegionSslCertificateRequest", "GetRegionSslPolicyRequest", 
"GetRegionTargetHttpProxyRequest", @@ -2860,6 +3002,7 @@ "GetXpnResourcesProjectsRequest", "GetZoneOperationRequest", "GetZoneRequest", + "GetZoneVmExtensionPolicyRequest", "GlobalAddressesMoveRequest", "GlobalNetworkEndpointGroupsAttachEndpointsRequest", "GlobalNetworkEndpointGroupsDetachEndpointsRequest", @@ -2890,7 +3033,10 @@ "HealthChecksScopedList", "HealthSource", "HealthSourceAggregatedList", + "HealthSourceHealth", "HealthSourceList", + "HealthSourcesGetHealthResponseSourceInfo", + "HealthSourcesGetHealthResponseSourceInfoBackendInfo", "HealthSourcesScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", @@ -2939,6 +3085,7 @@ "InsertInstanceGroupRequest", "InsertInstanceRequest", "InsertInstanceTemplateRequest", + "InsertInstantSnapshotGroupRequest", "InsertInstantSnapshotRequest", "InsertInterconnectAttachmentGroupRequest", "InsertInterconnectAttachmentRequest", @@ -2958,6 +3105,7 @@ "InsertPublicAdvertisedPrefixeRequest", "InsertPublicDelegatedPrefixeRequest", "InsertRegionAutoscalerRequest", + "InsertRegionBackendBucketRequest", "InsertRegionBackendServiceRequest", "InsertRegionCommitmentRequest", "InsertRegionCompositeHealthCheckRequest", @@ -2967,12 +3115,15 @@ "InsertRegionHealthCheckServiceRequest", "InsertRegionHealthSourceRequest", "InsertRegionInstanceGroupManagerRequest", + "InsertRegionInstanceGroupManagerResizeRequestRequest", "InsertRegionInstanceTemplateRequest", + "InsertRegionInstantSnapshotGroupRequest", "InsertRegionInstantSnapshotRequest", "InsertRegionNetworkEndpointGroupRequest", "InsertRegionNetworkFirewallPolicyRequest", "InsertRegionNotificationEndpointRequest", "InsertRegionSecurityPolicyRequest", + "InsertRegionSnapshotRequest", "InsertRegionSslCertificateRequest", "InsertRegionSslPolicyRequest", "InsertRegionTargetHttpProxyRequest", @@ -3002,6 +3153,7 @@ "InsertVpnGatewayRequest", "InsertVpnTunnelRequest", "InsertWireGroupRequest", + "InsertZoneVmExtensionPolicyRequest", "Instance", "InstanceAggregatedList", 
"InstanceConsumptionData", @@ -3049,6 +3201,7 @@ "InstanceGroupManagerStatusAllInstancesConfig", "InstanceGroupManagerStatusBulkInstanceOperation", "InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck", + "InstanceGroupManagerStatusInstanceStatusSummary", "InstanceGroupManagerStatusStateful", "InstanceGroupManagerStatusStatefulPerInstanceConfigs", "InstanceGroupManagerStatusVersionTarget", @@ -3097,6 +3250,10 @@ "InstanceWithNamedPorts", "InstantSnapshot", "InstantSnapshotAggregatedList", + "InstantSnapshotGroup", + "InstantSnapshotGroupParameters", + "InstantSnapshotGroupResourceStatus", + "InstantSnapshotGroupSourceInfo", "InstantSnapshotList", "InstantSnapshotParams", "InstantSnapshotResourceStatus", @@ -3222,6 +3379,8 @@ "ListInstancesRegionInstanceGroupsRequest", "ListInstancesRequest", "ListInstanceTemplatesRequest", + "ListInstantSnapshotGroups", + "ListInstantSnapshotGroupsRequest", "ListInstantSnapshotsRequest", "ListInterconnectAttachmentGroupsRequest", "ListInterconnectAttachmentsRequest", @@ -3258,6 +3417,7 @@ "ListPublicDelegatedPrefixesRequest", "ListReferrersInstancesRequest", "ListRegionAutoscalersRequest", + "ListRegionBackendBucketsRequest", "ListRegionBackendServicesRequest", "ListRegionCommitmentsRequest", "ListRegionCompositeHealthChecksRequest", @@ -3267,15 +3427,18 @@ "ListRegionHealthCheckServicesRequest", "ListRegionHealthChecksRequest", "ListRegionHealthSourcesRequest", + "ListRegionInstanceGroupManagerResizeRequestsRequest", "ListRegionInstanceGroupManagersRequest", "ListRegionInstanceGroupsRequest", "ListRegionInstanceTemplatesRequest", + "ListRegionInstantSnapshotGroupsRequest", "ListRegionInstantSnapshotsRequest", "ListRegionNetworkEndpointGroupsRequest", "ListRegionNetworkFirewallPoliciesRequest", "ListRegionNotificationEndpointsRequest", "ListRegionOperationsRequest", "ListRegionSecurityPoliciesRequest", + "ListRegionSnapshotsRequest", "ListRegionsRequest", "ListRegionSslCertificatesRequest", 
"ListRegionSslPoliciesRequest", @@ -3309,7 +3472,9 @@ "ListTargetTcpProxiesRequest", "ListTargetVpnGatewaysRequest", "ListUrlMapsRequest", + "ListUsableBackendBucketsRequest", "ListUsableBackendServicesRequest", + "ListUsableRegionBackendBucketsRequest", "ListUsableRegionBackendServicesRequest", "ListUsableSubnetworksRequest", "ListVpnGatewaysRequest", @@ -3318,6 +3483,7 @@ "ListXpnHostsProjectsRequest", "ListZoneOperationsRequest", "ListZonesRequest", + "ListZoneVmExtensionPoliciesRequest", "LocalDisk", "LocalizedMessage", "LocationPolicy", @@ -3469,6 +3635,7 @@ "PatchPublicAdvertisedPrefixeRequest", "PatchPublicDelegatedPrefixeRequest", "PatchRegionAutoscalerRequest", + "PatchRegionBackendBucketRequest", "PatchRegionBackendServiceRequest", "PatchRegionCompositeHealthCheckRequest", "PatchRegionHealthAggregationPolicyRequest", @@ -3478,6 +3645,7 @@ "PatchRegionInstanceGroupManagerRequest", "PatchRegionNetworkFirewallPolicyRequest", "PatchRegionSecurityPolicyRequest", + "PatchRegionSnapshotSettingRequest", "PatchRegionSslPolicyRequest", "PatchRegionTargetHttpsProxyRequest", "PatchRegionUrlMapRequest", @@ -3551,10 +3719,12 @@ "RegionDisksResizeRequest", "RegionDisksStartAsyncReplicationRequest", "RegionDiskTypeList", + "RegionDiskUpdateKmsKeyRequest", "RegionInstanceGroupList", "RegionInstanceGroupManagerDeleteInstanceConfigReq", "RegionInstanceGroupManagerList", "RegionInstanceGroupManagerPatchInstanceConfigReq", + "RegionInstanceGroupManagerResizeRequestsListResponse", "RegionInstanceGroupManagersAbandonInstancesRequest", "RegionInstanceGroupManagersApplyUpdatesRequest", "RegionInstanceGroupManagersCreateInstancesRequest", @@ -3580,6 +3750,7 @@ "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "RegionSetLabelsRequest", "RegionSetPolicyRequest", + "RegionSnapshotUpdateKmsKeyRequest", "RegionTargetHttpsProxiesSetSslCertificatesRequest", "RegionUrlMapsValidateRequest", "RemoveAssociationFirewallPolicyRequest", @@ -3776,6 +3947,7 @@ 
"SetIamPolicyImageRequest", "SetIamPolicyInstanceRequest", "SetIamPolicyInstanceTemplateRequest", + "SetIamPolicyInstantSnapshotGroupRequest", "SetIamPolicyInstantSnapshotRequest", "SetIamPolicyInterconnectAttachmentGroupRequest", "SetIamPolicyInterconnectGroupRequest", @@ -3785,10 +3957,13 @@ "SetIamPolicyNetworkFirewallPolicyRequest", "SetIamPolicyNodeGroupRequest", "SetIamPolicyNodeTemplateRequest", + "SetIamPolicyRegionBackendBucketRequest", "SetIamPolicyRegionBackendServiceRequest", "SetIamPolicyRegionDiskRequest", + "SetIamPolicyRegionInstantSnapshotGroupRequest", "SetIamPolicyRegionInstantSnapshotRequest", "SetIamPolicyRegionNetworkFirewallPolicyRequest", + "SetIamPolicyRegionSnapshotRequest", "SetIamPolicyReservationBlockRequest", "SetIamPolicyReservationRequest", "SetIamPolicyReservationSubBlockRequest", @@ -3814,6 +3989,7 @@ "SetLabelsRegionDiskRequest", "SetLabelsRegionInstantSnapshotRequest", "SetLabelsRegionSecurityPolicyRequest", + "SetLabelsRegionSnapshotRequest", "SetLabelsSecurityPolicyRequest", "SetLabelsSnapshotRequest", "SetLabelsTargetVpnGatewayRequest", @@ -3864,11 +4040,15 @@ "SimulateMaintenanceEventInstanceRequest", "SimulateMaintenanceEventNodeGroupRequest", "Snapshot", + "SnapshotGroupParameters", "SnapshotList", "SnapshotParams", "SnapshotSettings", + "SnapshotSettingsAccessLocation", + "SnapshotSettingsAccessLocationAccessLocationPreference", "SnapshotSettingsStorageLocationSettings", "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", + "SnapshotUpdateKmsKeyRequest", "SourceDiskEncryptionKey", "SourceInstanceParams", "SourceInstanceProperties", @@ -3995,6 +4175,7 @@ "TestIamPermissionsInstanceGroupRequest", "TestIamPermissionsInstanceRequest", "TestIamPermissionsInstanceTemplateRequest", + "TestIamPermissionsInstantSnapshotGroupRequest", "TestIamPermissionsInstantSnapshotRequest", "TestIamPermissionsInterconnectAttachmentGroupRequest", "TestIamPermissionsInterconnectGroupRequest", @@ -4008,6 +4189,7 @@ 
"TestIamPermissionsNodeTemplateRequest", "TestIamPermissionsPacketMirroringRequest", "TestIamPermissionsRegionAutoscalerRequest", + "TestIamPermissionsRegionBackendBucketRequest", "TestIamPermissionsRegionBackendServiceRequest", "TestIamPermissionsRegionCompositeHealthCheckRequest", "TestIamPermissionsRegionDiskRequest", @@ -4016,9 +4198,11 @@ "TestIamPermissionsRegionHealthCheckServiceRequest", "TestIamPermissionsRegionHealthSourceRequest", "TestIamPermissionsRegionInstanceGroupRequest", + "TestIamPermissionsRegionInstantSnapshotGroupRequest", "TestIamPermissionsRegionInstantSnapshotRequest", "TestIamPermissionsRegionNetworkFirewallPolicyRequest", "TestIamPermissionsRegionNotificationEndpointRequest", + "TestIamPermissionsRegionSnapshotRequest", "TestIamPermissionsReservationBlockRequest", "TestIamPermissionsReservationRequest", "TestIamPermissionsReservationSubBlockRequest", @@ -4048,6 +4232,10 @@ "UpdateFutureReservationRequest", "UpdateHealthCheckRequest", "UpdateInstanceRequest", + "UpdateKmsKeyDiskRequest", + "UpdateKmsKeyRegionDiskRequest", + "UpdateKmsKeyRegionSnapshotRequest", + "UpdateKmsKeySnapshotRequest", "UpdateLicenseRequest", "UpdateNetworkInterfaceInstanceRequest", "UpdatePeeringNetworkRequest", @@ -4067,6 +4255,7 @@ "UpdateShieldedInstanceConfigInstanceRequest", "UpdateStoragePoolRequest", "UpdateUrlMapRequest", + "UpdateZoneVmExtensionPolicyRequest", "UrlMap", "UrlMapList", "UrlMapReference", @@ -4088,6 +4277,11 @@ "VmEndpointNatMappingsInterfaceNatMappings", "VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", "VmEndpointNatMappingsList", + "VmExtensionPolicy", + "VmExtensionPolicyExtensionPolicy", + "VmExtensionPolicyInstanceSelector", + "VmExtensionPolicyLabelSelector", + "VmExtensionPolicyList", "VpnGateway", "VpnGatewayAggregatedList", "VpnGatewayList", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py index 740759c9cfd8..d23751d9f2e5 
100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/__init__.py @@ -62,6 +62,7 @@ from .services.instance_settings_service import InstanceSettingsServiceClient from .services.instance_templates import InstanceTemplatesClient from .services.instances import InstancesClient +from .services.instant_snapshot_groups import InstantSnapshotGroupsClient from .services.instant_snapshots import InstantSnapshotsClient from .services.interconnect_attachment_groups import InterconnectAttachmentGroupsClient from .services.interconnect_attachments import InterconnectAttachmentsClient @@ -89,6 +90,7 @@ from .services.public_advertised_prefixes import PublicAdvertisedPrefixesClient from .services.public_delegated_prefixes import PublicDelegatedPrefixesClient from .services.region_autoscalers import RegionAutoscalersClient +from .services.region_backend_buckets import RegionBackendBucketsClient from .services.region_backend_services import RegionBackendServicesClient from .services.region_commitments import RegionCommitmentsClient from .services.region_composite_health_checks import RegionCompositeHealthChecksClient @@ -100,10 +102,14 @@ from .services.region_health_check_services import RegionHealthCheckServicesClient from .services.region_health_checks import RegionHealthChecksClient from .services.region_health_sources import RegionHealthSourcesClient +from .services.region_instance_group_manager_resize_requests import ( + RegionInstanceGroupManagerResizeRequestsClient, +) from .services.region_instance_group_managers import RegionInstanceGroupManagersClient from .services.region_instance_groups import RegionInstanceGroupsClient from .services.region_instance_templates import RegionInstanceTemplatesClient from .services.region_instances import RegionInstancesClient +from .services.region_instant_snapshot_groups import RegionInstantSnapshotGroupsClient from .services.region_instant_snapshots 
import RegionInstantSnapshotsClient from .services.region_network_endpoint_groups import RegionNetworkEndpointGroupsClient from .services.region_network_firewall_policies import ( @@ -112,6 +118,8 @@ from .services.region_notification_endpoints import RegionNotificationEndpointsClient from .services.region_operations import RegionOperationsClient from .services.region_security_policies import RegionSecurityPoliciesClient +from .services.region_snapshot_settings import RegionSnapshotSettingsClient +from .services.region_snapshots import RegionSnapshotsClient from .services.region_ssl_certificates import RegionSslCertificatesClient from .services.region_ssl_policies import RegionSslPoliciesClient from .services.region_target_http_proxies import RegionTargetHttpProxiesClient @@ -149,6 +157,7 @@ from .services.vpn_tunnels import VpnTunnelsClient from .services.wire_groups import WireGroupsClient from .services.zone_operations import ZoneOperationsClient +from .services.zone_vm_extension_policies import ZoneVmExtensionPoliciesClient from .services.zones import ZonesClient from .types.compute import ( AbandonInstancesInstanceGroupManagerRequest, @@ -194,6 +203,7 @@ AggregatedListAcceleratorTypesRequest, AggregatedListAddressesRequest, AggregatedListAutoscalersRequest, + AggregatedListBackendBucketsRequest, AggregatedListBackendServicesRequest, AggregatedListDisksRequest, AggregatedListDiskTypesRequest, @@ -281,12 +291,15 @@ Backend, BackendBackendOrchestrationInfo, BackendBucket, + BackendBucketAggregatedList, BackendBucketCdnPolicy, BackendBucketCdnPolicyBypassCacheOnRequestHeader, BackendBucketCdnPolicyCacheKeyPolicy, BackendBucketCdnPolicyNegativeCachingPolicy, BackendBucketList, + BackendBucketListUsable, BackendBucketParams, + BackendBucketsScopedList, BackendBucketUsedBy, BackendCustomMetric, BackendService, @@ -339,12 +352,16 @@ BundledLocalSsds, CacheInvalidationRule, CacheKeyPolicy, + CachePolicy, + CachePolicyCacheKeyPolicy, + CachePolicyNegativeCachingPolicy, 
CalendarModeAdviceRequest, CalendarModeAdviceResponse, CalendarModeAdviceRpcRequest, CalendarModeRecommendation, CancelFutureReservationRequest, CancelInstanceGroupManagerResizeRequestRequest, + CancelRegionInstanceGroupManagerResizeRequestRequest, CircuitBreakers, CloneRulesFirewallPolicyRequest, CloneRulesNetworkFirewallPolicyRequest, @@ -352,11 +369,14 @@ Commitment, CommitmentAggregatedList, CommitmentList, + CommitmentParams, CommitmentResourceStatus, CommitmentsScopedList, CompositeHealthCheck, CompositeHealthCheckAggregatedList, + CompositeHealthCheckHealth, CompositeHealthCheckList, + CompositeHealthChecksGetHealthResponseHealthSourceHealth, CompositeHealthChecksScopedList, ConfidentialInstanceConfig, ConnectionDraining, @@ -406,6 +426,7 @@ DeleteInstancesInstanceGroupManagerRequest, DeleteInstancesRegionInstanceGroupManagerRequest, DeleteInstanceTemplateRequest, + DeleteInstantSnapshotGroupRequest, DeleteInstantSnapshotRequest, DeleteInterconnectAttachmentGroupRequest, DeleteInterconnectAttachmentRequest, @@ -429,6 +450,7 @@ DeletePublicAdvertisedPrefixeRequest, DeletePublicDelegatedPrefixeRequest, DeleteRegionAutoscalerRequest, + DeleteRegionBackendBucketRequest, DeleteRegionBackendServiceRequest, DeleteRegionCompositeHealthCheckRequest, DeleteRegionDiskRequest, @@ -437,7 +459,9 @@ DeleteRegionHealthCheckServiceRequest, DeleteRegionHealthSourceRequest, DeleteRegionInstanceGroupManagerRequest, + DeleteRegionInstanceGroupManagerResizeRequestRequest, DeleteRegionInstanceTemplateRequest, + DeleteRegionInstantSnapshotGroupRequest, DeleteRegionInstantSnapshotRequest, DeleteRegionNetworkEndpointGroupRequest, DeleteRegionNetworkFirewallPolicyRequest, @@ -445,6 +469,7 @@ DeleteRegionOperationRequest, DeleteRegionOperationResponse, DeleteRegionSecurityPolicyRequest, + DeleteRegionSnapshotRequest, DeleteRegionSslCertificateRequest, DeleteRegionSslPolicyRequest, DeleteRegionTargetHttpProxyRequest, @@ -479,6 +504,7 @@ DeleteWireGroupRequest, 
DeleteZoneOperationRequest, DeleteZoneOperationResponse, + DeleteZoneVmExtensionPolicyRequest, Denied, DeprecateImageRequest, DeprecationStatus, @@ -508,6 +534,7 @@ DiskTypeAggregatedList, DiskTypeList, DiskTypesScopedList, + DiskUpdateKmsKeyRequest, DisplayDevice, DistributionPolicy, DistributionPolicyZoneConfiguration, @@ -550,6 +577,7 @@ ForwardingRulesScopedList, FutureReservation, FutureReservationCommitmentInfo, + FutureReservationParams, FutureReservationsAggregatedListResponse, FutureReservationsListResponse, FutureReservationSpecificSKUProperties, @@ -603,6 +631,8 @@ GetHealthBackendServiceRequest, GetHealthCheckRequest, GetHealthRegionBackendServiceRequest, + GetHealthRegionCompositeHealthCheckRequest, + GetHealthRegionHealthSourceRequest, GetHealthTargetPoolRequest, GetIamPolicyBackendBucketRequest, GetIamPolicyBackendServiceRequest, @@ -611,6 +641,7 @@ GetIamPolicyImageRequest, GetIamPolicyInstanceRequest, GetIamPolicyInstanceTemplateRequest, + GetIamPolicyInstantSnapshotGroupRequest, GetIamPolicyInstantSnapshotRequest, GetIamPolicyInterconnectAttachmentGroupRequest, GetIamPolicyInterconnectGroupRequest, @@ -620,10 +651,13 @@ GetIamPolicyNetworkFirewallPolicyRequest, GetIamPolicyNodeGroupRequest, GetIamPolicyNodeTemplateRequest, + GetIamPolicyRegionBackendBucketRequest, GetIamPolicyRegionBackendServiceRequest, GetIamPolicyRegionDiskRequest, + GetIamPolicyRegionInstantSnapshotGroupRequest, GetIamPolicyRegionInstantSnapshotRequest, GetIamPolicyRegionNetworkFirewallPolicyRequest, + GetIamPolicyRegionSnapshotRequest, GetIamPolicyReservationBlockRequest, GetIamPolicyReservationRequest, GetIamPolicyReservationSubBlockRequest, @@ -640,6 +674,7 @@ GetInstanceRequest, GetInstanceSettingRequest, GetInstanceTemplateRequest, + GetInstantSnapshotGroupRequest, GetInstantSnapshotRequest, GetInterconnectAttachmentGroupRequest, GetInterconnectAttachmentRequest, @@ -673,6 +708,7 @@ GetPublicAdvertisedPrefixeRequest, GetPublicDelegatedPrefixeRequest, 
GetRegionAutoscalerRequest, + GetRegionBackendBucketRequest, GetRegionBackendServiceRequest, GetRegionCommitmentRequest, GetRegionCompositeHealthCheckRequest, @@ -683,8 +719,10 @@ GetRegionHealthCheckServiceRequest, GetRegionHealthSourceRequest, GetRegionInstanceGroupManagerRequest, + GetRegionInstanceGroupManagerResizeRequestRequest, GetRegionInstanceGroupRequest, GetRegionInstanceTemplateRequest, + GetRegionInstantSnapshotGroupRequest, GetRegionInstantSnapshotRequest, GetRegionNetworkEndpointGroupRequest, GetRegionNetworkFirewallPolicyRequest, @@ -692,6 +730,8 @@ GetRegionOperationRequest, GetRegionRequest, GetRegionSecurityPolicyRequest, + GetRegionSnapshotRequest, + GetRegionSnapshotSettingRequest, GetRegionSslCertificateRequest, GetRegionSslPolicyRequest, GetRegionTargetHttpProxyRequest, @@ -746,6 +786,7 @@ GetXpnResourcesProjectsRequest, GetZoneOperationRequest, GetZoneRequest, + GetZoneVmExtensionPolicyRequest, GlobalAddressesMoveRequest, GlobalNetworkEndpointGroupsAttachEndpointsRequest, GlobalNetworkEndpointGroupsDetachEndpointsRequest, @@ -776,7 +817,10 @@ HealthChecksScopedList, HealthSource, HealthSourceAggregatedList, + HealthSourceHealth, HealthSourceList, + HealthSourcesGetHealthResponseSourceInfo, + HealthSourcesGetHealthResponseSourceInfoBackendInfo, HealthSourcesScopedList, HealthStatus, HealthStatusForNetworkEndpoint, @@ -825,6 +869,7 @@ InsertInstanceGroupRequest, InsertInstanceRequest, InsertInstanceTemplateRequest, + InsertInstantSnapshotGroupRequest, InsertInstantSnapshotRequest, InsertInterconnectAttachmentGroupRequest, InsertInterconnectAttachmentRequest, @@ -844,6 +889,7 @@ InsertPublicAdvertisedPrefixeRequest, InsertPublicDelegatedPrefixeRequest, InsertRegionAutoscalerRequest, + InsertRegionBackendBucketRequest, InsertRegionBackendServiceRequest, InsertRegionCommitmentRequest, InsertRegionCompositeHealthCheckRequest, @@ -853,12 +899,15 @@ InsertRegionHealthCheckServiceRequest, InsertRegionHealthSourceRequest, 
InsertRegionInstanceGroupManagerRequest, + InsertRegionInstanceGroupManagerResizeRequestRequest, InsertRegionInstanceTemplateRequest, + InsertRegionInstantSnapshotGroupRequest, InsertRegionInstantSnapshotRequest, InsertRegionNetworkEndpointGroupRequest, InsertRegionNetworkFirewallPolicyRequest, InsertRegionNotificationEndpointRequest, InsertRegionSecurityPolicyRequest, + InsertRegionSnapshotRequest, InsertRegionSslCertificateRequest, InsertRegionSslPolicyRequest, InsertRegionTargetHttpProxyRequest, @@ -888,6 +937,7 @@ InsertVpnGatewayRequest, InsertVpnTunnelRequest, InsertWireGroupRequest, + InsertZoneVmExtensionPolicyRequest, Instance, InstanceAggregatedList, InstanceConsumptionData, @@ -935,6 +985,7 @@ InstanceGroupManagerStatusAllInstancesConfig, InstanceGroupManagerStatusBulkInstanceOperation, InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck, + InstanceGroupManagerStatusInstanceStatusSummary, InstanceGroupManagerStatusStateful, InstanceGroupManagerStatusStatefulPerInstanceConfigs, InstanceGroupManagerStatusVersionTarget, @@ -983,6 +1034,10 @@ InstanceWithNamedPorts, InstantSnapshot, InstantSnapshotAggregatedList, + InstantSnapshotGroup, + InstantSnapshotGroupParameters, + InstantSnapshotGroupResourceStatus, + InstantSnapshotGroupSourceInfo, InstantSnapshotList, InstantSnapshotParams, InstantSnapshotResourceStatus, @@ -1108,6 +1163,8 @@ ListInstancesRegionInstanceGroupsRequest, ListInstancesRequest, ListInstanceTemplatesRequest, + ListInstantSnapshotGroups, + ListInstantSnapshotGroupsRequest, ListInstantSnapshotsRequest, ListInterconnectAttachmentGroupsRequest, ListInterconnectAttachmentsRequest, @@ -1144,6 +1201,7 @@ ListPublicDelegatedPrefixesRequest, ListReferrersInstancesRequest, ListRegionAutoscalersRequest, + ListRegionBackendBucketsRequest, ListRegionBackendServicesRequest, ListRegionCommitmentsRequest, ListRegionCompositeHealthChecksRequest, @@ -1153,15 +1211,18 @@ ListRegionHealthCheckServicesRequest, ListRegionHealthChecksRequest, 
ListRegionHealthSourcesRequest, + ListRegionInstanceGroupManagerResizeRequestsRequest, ListRegionInstanceGroupManagersRequest, ListRegionInstanceGroupsRequest, ListRegionInstanceTemplatesRequest, + ListRegionInstantSnapshotGroupsRequest, ListRegionInstantSnapshotsRequest, ListRegionNetworkEndpointGroupsRequest, ListRegionNetworkFirewallPoliciesRequest, ListRegionNotificationEndpointsRequest, ListRegionOperationsRequest, ListRegionSecurityPoliciesRequest, + ListRegionSnapshotsRequest, ListRegionsRequest, ListRegionSslCertificatesRequest, ListRegionSslPoliciesRequest, @@ -1195,7 +1256,9 @@ ListTargetTcpProxiesRequest, ListTargetVpnGatewaysRequest, ListUrlMapsRequest, + ListUsableBackendBucketsRequest, ListUsableBackendServicesRequest, + ListUsableRegionBackendBucketsRequest, ListUsableRegionBackendServicesRequest, ListUsableSubnetworksRequest, ListVpnGatewaysRequest, @@ -1204,6 +1267,7 @@ ListXpnHostsProjectsRequest, ListZoneOperationsRequest, ListZonesRequest, + ListZoneVmExtensionPoliciesRequest, LocalDisk, LocalizedMessage, LocationPolicy, @@ -1355,6 +1419,7 @@ PatchPublicAdvertisedPrefixeRequest, PatchPublicDelegatedPrefixeRequest, PatchRegionAutoscalerRequest, + PatchRegionBackendBucketRequest, PatchRegionBackendServiceRequest, PatchRegionCompositeHealthCheckRequest, PatchRegionHealthAggregationPolicyRequest, @@ -1364,6 +1429,7 @@ PatchRegionInstanceGroupManagerRequest, PatchRegionNetworkFirewallPolicyRequest, PatchRegionSecurityPolicyRequest, + PatchRegionSnapshotSettingRequest, PatchRegionSslPolicyRequest, PatchRegionTargetHttpsProxyRequest, PatchRegionUrlMapRequest, @@ -1437,10 +1503,12 @@ RegionDisksResizeRequest, RegionDisksStartAsyncReplicationRequest, RegionDiskTypeList, + RegionDiskUpdateKmsKeyRequest, RegionInstanceGroupList, RegionInstanceGroupManagerDeleteInstanceConfigReq, RegionInstanceGroupManagerList, RegionInstanceGroupManagerPatchInstanceConfigReq, + RegionInstanceGroupManagerResizeRequestsListResponse, 
RegionInstanceGroupManagersAbandonInstancesRequest, RegionInstanceGroupManagersApplyUpdatesRequest, RegionInstanceGroupManagersCreateInstancesRequest, @@ -1466,6 +1534,7 @@ RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, RegionSetLabelsRequest, RegionSetPolicyRequest, + RegionSnapshotUpdateKmsKeyRequest, RegionTargetHttpsProxiesSetSslCertificatesRequest, RegionUrlMapsValidateRequest, RemoveAssociationFirewallPolicyRequest, @@ -1662,6 +1731,7 @@ SetIamPolicyImageRequest, SetIamPolicyInstanceRequest, SetIamPolicyInstanceTemplateRequest, + SetIamPolicyInstantSnapshotGroupRequest, SetIamPolicyInstantSnapshotRequest, SetIamPolicyInterconnectAttachmentGroupRequest, SetIamPolicyInterconnectGroupRequest, @@ -1671,10 +1741,13 @@ SetIamPolicyNetworkFirewallPolicyRequest, SetIamPolicyNodeGroupRequest, SetIamPolicyNodeTemplateRequest, + SetIamPolicyRegionBackendBucketRequest, SetIamPolicyRegionBackendServiceRequest, SetIamPolicyRegionDiskRequest, + SetIamPolicyRegionInstantSnapshotGroupRequest, SetIamPolicyRegionInstantSnapshotRequest, SetIamPolicyRegionNetworkFirewallPolicyRequest, + SetIamPolicyRegionSnapshotRequest, SetIamPolicyReservationBlockRequest, SetIamPolicyReservationRequest, SetIamPolicyReservationSubBlockRequest, @@ -1700,6 +1773,7 @@ SetLabelsRegionDiskRequest, SetLabelsRegionInstantSnapshotRequest, SetLabelsRegionSecurityPolicyRequest, + SetLabelsRegionSnapshotRequest, SetLabelsSecurityPolicyRequest, SetLabelsSnapshotRequest, SetLabelsTargetVpnGatewayRequest, @@ -1750,11 +1824,15 @@ SimulateMaintenanceEventInstanceRequest, SimulateMaintenanceEventNodeGroupRequest, Snapshot, + SnapshotGroupParameters, SnapshotList, SnapshotParams, SnapshotSettings, + SnapshotSettingsAccessLocation, + SnapshotSettingsAccessLocationAccessLocationPreference, SnapshotSettingsStorageLocationSettings, SnapshotSettingsStorageLocationSettingsStorageLocationPreference, + SnapshotUpdateKmsKeyRequest, SourceDiskEncryptionKey, SourceInstanceParams, 
SourceInstanceProperties, @@ -1881,6 +1959,7 @@ TestIamPermissionsInstanceGroupRequest, TestIamPermissionsInstanceRequest, TestIamPermissionsInstanceTemplateRequest, + TestIamPermissionsInstantSnapshotGroupRequest, TestIamPermissionsInstantSnapshotRequest, TestIamPermissionsInterconnectAttachmentGroupRequest, TestIamPermissionsInterconnectGroupRequest, @@ -1894,6 +1973,7 @@ TestIamPermissionsNodeTemplateRequest, TestIamPermissionsPacketMirroringRequest, TestIamPermissionsRegionAutoscalerRequest, + TestIamPermissionsRegionBackendBucketRequest, TestIamPermissionsRegionBackendServiceRequest, TestIamPermissionsRegionCompositeHealthCheckRequest, TestIamPermissionsRegionDiskRequest, @@ -1902,9 +1982,11 @@ TestIamPermissionsRegionHealthCheckServiceRequest, TestIamPermissionsRegionHealthSourceRequest, TestIamPermissionsRegionInstanceGroupRequest, + TestIamPermissionsRegionInstantSnapshotGroupRequest, TestIamPermissionsRegionInstantSnapshotRequest, TestIamPermissionsRegionNetworkFirewallPolicyRequest, TestIamPermissionsRegionNotificationEndpointRequest, + TestIamPermissionsRegionSnapshotRequest, TestIamPermissionsReservationBlockRequest, TestIamPermissionsReservationRequest, TestIamPermissionsReservationSubBlockRequest, @@ -1934,6 +2016,10 @@ UpdateFutureReservationRequest, UpdateHealthCheckRequest, UpdateInstanceRequest, + UpdateKmsKeyDiskRequest, + UpdateKmsKeyRegionDiskRequest, + UpdateKmsKeyRegionSnapshotRequest, + UpdateKmsKeySnapshotRequest, UpdateLicenseRequest, UpdateNetworkInterfaceInstanceRequest, UpdatePeeringNetworkRequest, @@ -1953,6 +2039,7 @@ UpdateShieldedInstanceConfigInstanceRequest, UpdateStoragePoolRequest, UpdateUrlMapRequest, + UpdateZoneVmExtensionPolicyRequest, UrlMap, UrlMapList, UrlMapReference, @@ -1974,6 +2061,11 @@ VmEndpointNatMappingsInterfaceNatMappings, VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings, VmEndpointNatMappingsList, + VmExtensionPolicy, + VmExtensionPolicyExtensionPolicy, + VmExtensionPolicyInstanceSelector, + 
VmExtensionPolicyLabelSelector, + VmExtensionPolicyList, VpnGateway, VpnGatewayAggregatedList, VpnGatewayList, @@ -2164,6 +2256,7 @@ def _get_version(dependency_name): "AggregatedListAcceleratorTypesRequest", "AggregatedListAddressesRequest", "AggregatedListAutoscalersRequest", + "AggregatedListBackendBucketsRequest", "AggregatedListBackendServicesRequest", "AggregatedListDiskTypesRequest", "AggregatedListDisksRequest", @@ -2251,14 +2344,17 @@ def _get_version(dependency_name): "Backend", "BackendBackendOrchestrationInfo", "BackendBucket", + "BackendBucketAggregatedList", "BackendBucketCdnPolicy", "BackendBucketCdnPolicyBypassCacheOnRequestHeader", "BackendBucketCdnPolicyCacheKeyPolicy", "BackendBucketCdnPolicyNegativeCachingPolicy", "BackendBucketList", + "BackendBucketListUsable", "BackendBucketParams", "BackendBucketUsedBy", "BackendBucketsClient", + "BackendBucketsScopedList", "BackendCustomMetric", "BackendService", "BackendServiceAggregatedList", @@ -2311,12 +2407,16 @@ def _get_version(dependency_name): "BundledLocalSsds", "CacheInvalidationRule", "CacheKeyPolicy", + "CachePolicy", + "CachePolicyCacheKeyPolicy", + "CachePolicyNegativeCachingPolicy", "CalendarModeAdviceRequest", "CalendarModeAdviceResponse", "CalendarModeAdviceRpcRequest", "CalendarModeRecommendation", "CancelFutureReservationRequest", "CancelInstanceGroupManagerResizeRequestRequest", + "CancelRegionInstanceGroupManagerResizeRequestRequest", "CircuitBreakers", "CloneRulesFirewallPolicyRequest", "CloneRulesNetworkFirewallPolicyRequest", @@ -2324,11 +2424,14 @@ def _get_version(dependency_name): "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentParams", "CommitmentResourceStatus", "CommitmentsScopedList", "CompositeHealthCheck", "CompositeHealthCheckAggregatedList", + "CompositeHealthCheckHealth", "CompositeHealthCheckList", + "CompositeHealthChecksGetHealthResponseHealthSourceHealth", "CompositeHealthChecksScopedList", "ConfidentialInstanceConfig", "ConnectionDraining", 
@@ -2379,6 +2482,7 @@ def _get_version(dependency_name): "DeleteInstanceTemplateRequest", "DeleteInstancesInstanceGroupManagerRequest", "DeleteInstancesRegionInstanceGroupManagerRequest", + "DeleteInstantSnapshotGroupRequest", "DeleteInstantSnapshotRequest", "DeleteInterconnectAttachmentGroupRequest", "DeleteInterconnectAttachmentRequest", @@ -2402,6 +2506,7 @@ def _get_version(dependency_name): "DeletePublicAdvertisedPrefixeRequest", "DeletePublicDelegatedPrefixeRequest", "DeleteRegionAutoscalerRequest", + "DeleteRegionBackendBucketRequest", "DeleteRegionBackendServiceRequest", "DeleteRegionCompositeHealthCheckRequest", "DeleteRegionDiskRequest", @@ -2410,7 +2515,9 @@ def _get_version(dependency_name): "DeleteRegionHealthCheckServiceRequest", "DeleteRegionHealthSourceRequest", "DeleteRegionInstanceGroupManagerRequest", + "DeleteRegionInstanceGroupManagerResizeRequestRequest", "DeleteRegionInstanceTemplateRequest", + "DeleteRegionInstantSnapshotGroupRequest", "DeleteRegionInstantSnapshotRequest", "DeleteRegionNetworkEndpointGroupRequest", "DeleteRegionNetworkFirewallPolicyRequest", @@ -2418,6 +2525,7 @@ def _get_version(dependency_name): "DeleteRegionOperationRequest", "DeleteRegionOperationResponse", "DeleteRegionSecurityPolicyRequest", + "DeleteRegionSnapshotRequest", "DeleteRegionSslCertificateRequest", "DeleteRegionSslPolicyRequest", "DeleteRegionTargetHttpProxyRequest", @@ -2452,6 +2560,7 @@ def _get_version(dependency_name): "DeleteWireGroupRequest", "DeleteZoneOperationRequest", "DeleteZoneOperationResponse", + "DeleteZoneVmExtensionPolicyRequest", "Denied", "DeprecateImageRequest", "DeprecationStatus", @@ -2476,6 +2585,7 @@ def _get_version(dependency_name): "DiskTypeList", "DiskTypesClient", "DiskTypesScopedList", + "DiskUpdateKmsKeyRequest", "DisksAddResourcePoliciesRequest", "DisksClient", "DisksRemoveResourcePoliciesRequest", @@ -2529,6 +2639,7 @@ def _get_version(dependency_name): "ForwardingRulesScopedList", "FutureReservation", 
"FutureReservationCommitmentInfo", + "FutureReservationParams", "FutureReservationSpecificSKUProperties", "FutureReservationStatus", "FutureReservationStatusExistingMatchingUsageInfo", @@ -2585,6 +2696,8 @@ def _get_version(dependency_name): "GetHealthBackendServiceRequest", "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", + "GetHealthRegionCompositeHealthCheckRequest", + "GetHealthRegionHealthSourceRequest", "GetHealthTargetPoolRequest", "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", @@ -2593,6 +2706,7 @@ def _get_version(dependency_name): "GetIamPolicyImageRequest", "GetIamPolicyInstanceRequest", "GetIamPolicyInstanceTemplateRequest", + "GetIamPolicyInstantSnapshotGroupRequest", "GetIamPolicyInstantSnapshotRequest", "GetIamPolicyInterconnectAttachmentGroupRequest", "GetIamPolicyInterconnectGroupRequest", @@ -2602,10 +2716,13 @@ def _get_version(dependency_name): "GetIamPolicyNetworkFirewallPolicyRequest", "GetIamPolicyNodeGroupRequest", "GetIamPolicyNodeTemplateRequest", + "GetIamPolicyRegionBackendBucketRequest", "GetIamPolicyRegionBackendServiceRequest", "GetIamPolicyRegionDiskRequest", + "GetIamPolicyRegionInstantSnapshotGroupRequest", "GetIamPolicyRegionInstantSnapshotRequest", "GetIamPolicyRegionNetworkFirewallPolicyRequest", + "GetIamPolicyRegionSnapshotRequest", "GetIamPolicyReservationBlockRequest", "GetIamPolicyReservationRequest", "GetIamPolicyReservationSubBlockRequest", @@ -2622,6 +2739,7 @@ def _get_version(dependency_name): "GetInstanceRequest", "GetInstanceSettingRequest", "GetInstanceTemplateRequest", + "GetInstantSnapshotGroupRequest", "GetInstantSnapshotRequest", "GetInterconnectAttachmentGroupRequest", "GetInterconnectAttachmentRequest", @@ -2655,6 +2773,7 @@ def _get_version(dependency_name): "GetPublicAdvertisedPrefixeRequest", "GetPublicDelegatedPrefixeRequest", "GetRegionAutoscalerRequest", + "GetRegionBackendBucketRequest", "GetRegionBackendServiceRequest", "GetRegionCommitmentRequest", 
"GetRegionCompositeHealthCheckRequest", @@ -2665,8 +2784,10 @@ def _get_version(dependency_name): "GetRegionHealthCheckServiceRequest", "GetRegionHealthSourceRequest", "GetRegionInstanceGroupManagerRequest", + "GetRegionInstanceGroupManagerResizeRequestRequest", "GetRegionInstanceGroupRequest", "GetRegionInstanceTemplateRequest", + "GetRegionInstantSnapshotGroupRequest", "GetRegionInstantSnapshotRequest", "GetRegionNetworkEndpointGroupRequest", "GetRegionNetworkFirewallPolicyRequest", @@ -2674,6 +2795,8 @@ def _get_version(dependency_name): "GetRegionOperationRequest", "GetRegionRequest", "GetRegionSecurityPolicyRequest", + "GetRegionSnapshotRequest", + "GetRegionSnapshotSettingRequest", "GetRegionSslCertificateRequest", "GetRegionSslPolicyRequest", "GetRegionTargetHttpProxyRequest", @@ -2728,6 +2851,7 @@ def _get_version(dependency_name): "GetXpnResourcesProjectsRequest", "GetZoneOperationRequest", "GetZoneRequest", + "GetZoneVmExtensionPolicyRequest", "GlobalAddressesClient", "GlobalAddressesMoveRequest", "GlobalForwardingRulesClient", @@ -2766,7 +2890,10 @@ def _get_version(dependency_name): "HealthChecksScopedList", "HealthSource", "HealthSourceAggregatedList", + "HealthSourceHealth", "HealthSourceList", + "HealthSourcesGetHealthResponseSourceInfo", + "HealthSourcesGetHealthResponseSourceInfoBackendInfo", "HealthSourcesScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", @@ -2814,6 +2941,7 @@ def _get_version(dependency_name): "InsertInstanceGroupRequest", "InsertInstanceRequest", "InsertInstanceTemplateRequest", + "InsertInstantSnapshotGroupRequest", "InsertInstantSnapshotRequest", "InsertInterconnectAttachmentGroupRequest", "InsertInterconnectAttachmentRequest", @@ -2833,6 +2961,7 @@ def _get_version(dependency_name): "InsertPublicAdvertisedPrefixeRequest", "InsertPublicDelegatedPrefixeRequest", "InsertRegionAutoscalerRequest", + "InsertRegionBackendBucketRequest", "InsertRegionBackendServiceRequest", "InsertRegionCommitmentRequest", 
"InsertRegionCompositeHealthCheckRequest", @@ -2842,12 +2971,15 @@ def _get_version(dependency_name): "InsertRegionHealthCheckServiceRequest", "InsertRegionHealthSourceRequest", "InsertRegionInstanceGroupManagerRequest", + "InsertRegionInstanceGroupManagerResizeRequestRequest", "InsertRegionInstanceTemplateRequest", + "InsertRegionInstantSnapshotGroupRequest", "InsertRegionInstantSnapshotRequest", "InsertRegionNetworkEndpointGroupRequest", "InsertRegionNetworkFirewallPolicyRequest", "InsertRegionNotificationEndpointRequest", "InsertRegionSecurityPolicyRequest", + "InsertRegionSnapshotRequest", "InsertRegionSslCertificateRequest", "InsertRegionSslPolicyRequest", "InsertRegionTargetHttpProxyRequest", @@ -2877,6 +3009,7 @@ def _get_version(dependency_name): "InsertVpnGatewayRequest", "InsertVpnTunnelRequest", "InsertWireGroupRequest", + "InsertZoneVmExtensionPolicyRequest", "Instance", "InstanceAggregatedList", "InstanceConsumptionData", @@ -2908,6 +3041,7 @@ def _get_version(dependency_name): "InstanceGroupManagerStatusAllInstancesConfig", "InstanceGroupManagerStatusBulkInstanceOperation", "InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck", + "InstanceGroupManagerStatusInstanceStatusSummary", "InstanceGroupManagerStatusStateful", "InstanceGroupManagerStatusStatefulPerInstanceConfigs", "InstanceGroupManagerStatusVersionTarget", @@ -2978,6 +3112,11 @@ def _get_version(dependency_name): "InstancesStartWithEncryptionKeyRequest", "InstantSnapshot", "InstantSnapshotAggregatedList", + "InstantSnapshotGroup", + "InstantSnapshotGroupParameters", + "InstantSnapshotGroupResourceStatus", + "InstantSnapshotGroupSourceInfo", + "InstantSnapshotGroupsClient", "InstantSnapshotList", "InstantSnapshotParams", "InstantSnapshotResourceStatus", @@ -3112,6 +3251,8 @@ def _get_version(dependency_name): "ListInstancesInstanceGroupsRequest", "ListInstancesRegionInstanceGroupsRequest", "ListInstancesRequest", + "ListInstantSnapshotGroups", + "ListInstantSnapshotGroupsRequest", 
"ListInstantSnapshotsRequest", "ListInterconnectAttachmentGroupsRequest", "ListInterconnectAttachmentsRequest", @@ -3148,6 +3289,7 @@ def _get_version(dependency_name): "ListPublicDelegatedPrefixesRequest", "ListReferrersInstancesRequest", "ListRegionAutoscalersRequest", + "ListRegionBackendBucketsRequest", "ListRegionBackendServicesRequest", "ListRegionCommitmentsRequest", "ListRegionCompositeHealthChecksRequest", @@ -3157,15 +3299,18 @@ def _get_version(dependency_name): "ListRegionHealthCheckServicesRequest", "ListRegionHealthChecksRequest", "ListRegionHealthSourcesRequest", + "ListRegionInstanceGroupManagerResizeRequestsRequest", "ListRegionInstanceGroupManagersRequest", "ListRegionInstanceGroupsRequest", "ListRegionInstanceTemplatesRequest", + "ListRegionInstantSnapshotGroupsRequest", "ListRegionInstantSnapshotsRequest", "ListRegionNetworkEndpointGroupsRequest", "ListRegionNetworkFirewallPoliciesRequest", "ListRegionNotificationEndpointsRequest", "ListRegionOperationsRequest", "ListRegionSecurityPoliciesRequest", + "ListRegionSnapshotsRequest", "ListRegionSslCertificatesRequest", "ListRegionSslPoliciesRequest", "ListRegionTargetHttpProxiesRequest", @@ -3199,7 +3344,9 @@ def _get_version(dependency_name): "ListTargetTcpProxiesRequest", "ListTargetVpnGatewaysRequest", "ListUrlMapsRequest", + "ListUsableBackendBucketsRequest", "ListUsableBackendServicesRequest", + "ListUsableRegionBackendBucketsRequest", "ListUsableRegionBackendServicesRequest", "ListUsableSubnetworksRequest", "ListVpnGatewaysRequest", @@ -3207,6 +3354,7 @@ def _get_version(dependency_name): "ListWireGroupsRequest", "ListXpnHostsProjectsRequest", "ListZoneOperationsRequest", + "ListZoneVmExtensionPoliciesRequest", "ListZonesRequest", "LocalDisk", "LocalizedMessage", @@ -3372,6 +3520,7 @@ def _get_version(dependency_name): "PatchPublicAdvertisedPrefixeRequest", "PatchPublicDelegatedPrefixeRequest", "PatchRegionAutoscalerRequest", + "PatchRegionBackendBucketRequest", 
"PatchRegionBackendServiceRequest", "PatchRegionCompositeHealthCheckRequest", "PatchRegionHealthAggregationPolicyRequest", @@ -3381,6 +3530,7 @@ def _get_version(dependency_name): "PatchRegionInstanceGroupManagerRequest", "PatchRegionNetworkFirewallPolicyRequest", "PatchRegionSecurityPolicyRequest", + "PatchRegionSnapshotSettingRequest", "PatchRegionSslPolicyRequest", "PatchRegionTargetHttpsProxyRequest", "PatchRegionUrlMapRequest", @@ -3454,11 +3604,13 @@ def _get_version(dependency_name): "RegionAddressesMoveRequest", "RegionAutoscalerList", "RegionAutoscalersClient", + "RegionBackendBucketsClient", "RegionBackendServicesClient", "RegionCommitmentsClient", "RegionCompositeHealthChecksClient", "RegionDiskTypeList", "RegionDiskTypesClient", + "RegionDiskUpdateKmsKeyRequest", "RegionDisksAddResourcePoliciesRequest", "RegionDisksClient", "RegionDisksRemoveResourcePoliciesRequest", @@ -3472,6 +3624,8 @@ def _get_version(dependency_name): "RegionInstanceGroupManagerDeleteInstanceConfigReq", "RegionInstanceGroupManagerList", "RegionInstanceGroupManagerPatchInstanceConfigReq", + "RegionInstanceGroupManagerResizeRequestsClient", + "RegionInstanceGroupManagerResizeRequestsListResponse", "RegionInstanceGroupManagerUpdateInstanceConfigReq", "RegionInstanceGroupManagersAbandonInstancesRequest", "RegionInstanceGroupManagersApplyUpdatesRequest", @@ -3494,6 +3648,7 @@ def _get_version(dependency_name): "RegionInstanceGroupsSetNamedPortsRequest", "RegionInstanceTemplatesClient", "RegionInstancesClient", + "RegionInstantSnapshotGroupsClient", "RegionInstantSnapshotsClient", "RegionList", "RegionNetworkEndpointGroupsAttachEndpointsRequest", @@ -3507,6 +3662,9 @@ def _get_version(dependency_name): "RegionSecurityPoliciesClient", "RegionSetLabelsRequest", "RegionSetPolicyRequest", + "RegionSnapshotSettingsClient", + "RegionSnapshotUpdateKmsKeyRequest", + "RegionSnapshotsClient", "RegionSslCertificatesClient", "RegionSslPoliciesClient", "RegionTargetHttpProxiesClient", @@ -3721,6 
+3879,7 @@ def _get_version(dependency_name): "SetIamPolicyImageRequest", "SetIamPolicyInstanceRequest", "SetIamPolicyInstanceTemplateRequest", + "SetIamPolicyInstantSnapshotGroupRequest", "SetIamPolicyInstantSnapshotRequest", "SetIamPolicyInterconnectAttachmentGroupRequest", "SetIamPolicyInterconnectGroupRequest", @@ -3730,10 +3889,13 @@ def _get_version(dependency_name): "SetIamPolicyNetworkFirewallPolicyRequest", "SetIamPolicyNodeGroupRequest", "SetIamPolicyNodeTemplateRequest", + "SetIamPolicyRegionBackendBucketRequest", "SetIamPolicyRegionBackendServiceRequest", "SetIamPolicyRegionDiskRequest", + "SetIamPolicyRegionInstantSnapshotGroupRequest", "SetIamPolicyRegionInstantSnapshotRequest", "SetIamPolicyRegionNetworkFirewallPolicyRequest", + "SetIamPolicyRegionSnapshotRequest", "SetIamPolicyReservationBlockRequest", "SetIamPolicyReservationRequest", "SetIamPolicyReservationSubBlockRequest", @@ -3759,6 +3921,7 @@ def _get_version(dependency_name): "SetLabelsRegionDiskRequest", "SetLabelsRegionInstantSnapshotRequest", "SetLabelsRegionSecurityPolicyRequest", + "SetLabelsRegionSnapshotRequest", "SetLabelsSecurityPolicyRequest", "SetLabelsSnapshotRequest", "SetLabelsTargetVpnGatewayRequest", @@ -3809,12 +3972,16 @@ def _get_version(dependency_name): "SimulateMaintenanceEventInstanceRequest", "SimulateMaintenanceEventNodeGroupRequest", "Snapshot", + "SnapshotGroupParameters", "SnapshotList", "SnapshotParams", "SnapshotSettings", + "SnapshotSettingsAccessLocation", + "SnapshotSettingsAccessLocationAccessLocationPreference", "SnapshotSettingsServiceClient", "SnapshotSettingsStorageLocationSettings", "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", + "SnapshotUpdateKmsKeyRequest", "SnapshotsClient", "SourceDiskEncryptionKey", "SourceInstanceParams", @@ -3954,6 +4121,7 @@ def _get_version(dependency_name): "TestIamPermissionsInstanceGroupRequest", "TestIamPermissionsInstanceRequest", "TestIamPermissionsInstanceTemplateRequest", + 
"TestIamPermissionsInstantSnapshotGroupRequest", "TestIamPermissionsInstantSnapshotRequest", "TestIamPermissionsInterconnectAttachmentGroupRequest", "TestIamPermissionsInterconnectGroupRequest", @@ -3967,6 +4135,7 @@ def _get_version(dependency_name): "TestIamPermissionsNodeTemplateRequest", "TestIamPermissionsPacketMirroringRequest", "TestIamPermissionsRegionAutoscalerRequest", + "TestIamPermissionsRegionBackendBucketRequest", "TestIamPermissionsRegionBackendServiceRequest", "TestIamPermissionsRegionCompositeHealthCheckRequest", "TestIamPermissionsRegionDiskRequest", @@ -3975,9 +4144,11 @@ def _get_version(dependency_name): "TestIamPermissionsRegionHealthCheckServiceRequest", "TestIamPermissionsRegionHealthSourceRequest", "TestIamPermissionsRegionInstanceGroupRequest", + "TestIamPermissionsRegionInstantSnapshotGroupRequest", "TestIamPermissionsRegionInstantSnapshotRequest", "TestIamPermissionsRegionNetworkFirewallPolicyRequest", "TestIamPermissionsRegionNotificationEndpointRequest", + "TestIamPermissionsRegionSnapshotRequest", "TestIamPermissionsReservationBlockRequest", "TestIamPermissionsReservationRequest", "TestIamPermissionsReservationSubBlockRequest", @@ -4007,6 +4178,10 @@ def _get_version(dependency_name): "UpdateFutureReservationRequest", "UpdateHealthCheckRequest", "UpdateInstanceRequest", + "UpdateKmsKeyDiskRequest", + "UpdateKmsKeyRegionDiskRequest", + "UpdateKmsKeyRegionSnapshotRequest", + "UpdateKmsKeySnapshotRequest", "UpdateLicenseRequest", "UpdateNetworkInterfaceInstanceRequest", "UpdatePeeringNetworkRequest", @@ -4026,6 +4201,7 @@ def _get_version(dependency_name): "UpdateShieldedInstanceConfigInstanceRequest", "UpdateStoragePoolRequest", "UpdateUrlMapRequest", + "UpdateZoneVmExtensionPolicyRequest", "UrlMap", "UrlMapList", "UrlMapReference", @@ -4048,6 +4224,11 @@ def _get_version(dependency_name): "VmEndpointNatMappingsInterfaceNatMappings", "VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", "VmEndpointNatMappingsList", + 
"VmExtensionPolicy", + "VmExtensionPolicyExtensionPolicy", + "VmExtensionPolicyInstanceSelector", + "VmExtensionPolicyLabelSelector", + "VmExtensionPolicyList", "VpnGateway", "VpnGatewayAggregatedList", "VpnGatewayList", @@ -4098,5 +4279,6 @@ def _get_version(dependency_name): "ZoneSetLabelsRequest", "ZoneSetNestedPolicyRequest", "ZoneSetPolicyRequest", + "ZoneVmExtensionPoliciesClient", "ZonesClient", ) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json index 62bcc73597d3..1364eae6590d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json +++ b/packages/google-cloud-compute/google/cloud/compute_v1/gapic_metadata.json @@ -151,6 +151,11 @@ "add_signed_url_key" ] }, + "AggregatedList": { + "methods": [ + "aggregated_list" + ] + }, "Delete": { "methods": [ "delete" @@ -181,6 +186,11 @@ "list" ] }, + "ListUsable": { + "methods": [ + "list_usable" + ] + }, "Patch": { "methods": [ "patch" @@ -461,6 +471,11 @@ "methods": [ "update" ] + }, + "UpdateKmsKey": { + "methods": [ + "update_kms_key" + ] } } } @@ -1646,6 +1661,50 @@ } } }, + "InstantSnapshotGroups": { + "clients": { + "rest": { + "libraryClient": "InstantSnapshotGroupsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, "InstantSnapshots": { "clients": { "rest": { @@ -2959,6 +3018,60 @@ } } }, + "RegionBackendBuckets": { + "clients": { + "rest": { + "libraryClient": "RegionBackendBucketsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + 
"GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "ListUsable": { + "methods": [ + "list_usable" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, "RegionBackendServices": { "clients": { "rest": { @@ -3082,6 +3195,11 @@ "get" ] }, + "GetHealth": { + "methods": [ + "get_health" + ] + }, "Insert": { "methods": [ "insert" @@ -3214,6 +3332,11 @@ "methods": [ "update" ] + }, + "UpdateKmsKey": { + "methods": [ + "update_kms_key" + ] } } } @@ -3371,6 +3494,11 @@ "get" ] }, + "GetHealth": { + "methods": [ + "get_health" + ] + }, "Insert": { "methods": [ "insert" @@ -3395,6 +3523,40 @@ } } }, + "RegionInstanceGroupManagerResizeRequests": { + "clients": { + "rest": { + "libraryClient": "RegionInstanceGroupManagerResizeRequestsClient", + "rpcs": { + "Cancel": { + "methods": [ + "cancel" + ] + }, + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + } + } + } + } + }, "RegionInstanceGroupManagers": { "clients": { "rest": { @@ -3596,6 +3758,50 @@ } } }, + "RegionInstantSnapshotGroups": { + "clients": { + "rest": { + "libraryClient": "RegionInstantSnapshotGroupsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + } + } + } + } + }, "RegionInstantSnapshots": { "clients": { "rest": { @@ -3910,6 +4116,79 @@ } } }, + "RegionSnapshotSettings": { + 
"clients": { + "rest": { + "libraryClient": "RegionSnapshotSettingsClient", + "rpcs": { + "Get": { + "methods": [ + "get" + ] + }, + "Patch": { + "methods": [ + "patch" + ] + } + } + } + } + }, + "RegionSnapshots": { + "clients": { + "rest": { + "libraryClient": "RegionSnapshotsClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "GetIamPolicy": { + "methods": [ + "get_iam_policy" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "SetIamPolicy": { + "methods": [ + "set_iam_policy" + ] + }, + "SetLabels": { + "methods": [ + "set_labels" + ] + }, + "TestIamPermissions": { + "methods": [ + "test_iam_permissions" + ] + }, + "UpdateKmsKey": { + "methods": [ + "update_kms_key" + ] + } + } + } + } + }, "RegionSslCertificates": { "clients": { "rest": { @@ -4711,6 +4990,11 @@ "methods": [ "test_iam_permissions" ] + }, + "UpdateKmsKey": { + "methods": [ + "update_kms_key" + ] } } } @@ -5563,6 +5847,40 @@ } } }, + "ZoneVmExtensionPolicies": { + "clients": { + "rest": { + "libraryClient": "ZoneVmExtensionPoliciesClient", + "rpcs": { + "Delete": { + "methods": [ + "delete" + ] + }, + "Get": { + "methods": [ + "get" + ] + }, + "Insert": { + "methods": [ + "insert" + ] + }, + "List": { + "methods": [ + "list" + ] + }, + "Update": { + "methods": [ + "update" + ] + } + } + } + } + }, "Zones": { "clients": { "rest": { diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py index c8181650930d..b584c5313222 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/client.py @@ -1003,6 +1003,134 @@ def error_code(self): # Done; return the response. 
return response + def aggregated_list( + self, + request: Optional[ + Union[compute.AggregatedListBackendBucketsRequest, dict] + ] = None, + *, + project: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.AggregatedListPager: + r"""Retrieves the list of all BackendBucket resources, regional and + global, available to the specified project. + + To prevent failure, it is recommended that you set the + ``returnPartialSuccess`` parameter to ``true``. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_aggregated_list(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListBackendBucketsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.AggregatedListBackendBucketsRequest, dict]): + The request object. A request message for + BackendBuckets.AggregatedList. See the + method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.services.backend_buckets.pagers.AggregatedListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.AggregatedListBackendBucketsRequest): + request = compute.AggregatedListBackendBucketsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.aggregated_list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
    def list_usable(
        self,
        request: Optional[Union[compute.ListUsableBackendBucketsRequest, dict]] = None,
        *,
        project: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> pagers.ListUsablePager:
        r"""Retrieves a list of all usable backend buckets in the
        specified project.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_list_usable():
                # Create a client
                client = compute_v1.BackendBucketsClient()

                # Initialize request argument(s)
                request = compute_v1.ListUsableBackendBucketsRequest(
                    project="project_value",
                )

                # Make the request
                page_result = client.list_usable(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.ListUsableBackendBucketsRequest, dict]):
                The request object. A request message for
                BackendBuckets.ListUsable. See the
                method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.compute_v1.services.backend_buckets.pagers.ListUsablePager:
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        # ``project`` is the only flattened field for this RPC.
        flattened_params = [project]
        has_flattened_params = (
            len([param for param in flattened_params if param is not None]) > 0
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, compute.ListUsableBackendBucketsRequest):
            request = compute.ListUsableBackendBucketsRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list_usable]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # The routing header lets the backend dispatch by project.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("project", request.project),)),
        )

        # Validate the universe domain.
        # NOTE(review): implemented elsewhere on this client; presumably raises
        # on a universe-domain mismatch — confirm against the base client.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method.
        response = pagers.ListUsablePager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.BackendBucketAggregatedList], + request: compute.AggregatedListBackendBucketsRequest, + response: compute.BackendBucketAggregatedList, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.AggregatedListBackendBucketsRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendBucketAggregatedList): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = compute.AggregatedListBackendBucketsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendBucketAggregatedList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[Tuple[str, compute.BackendBucketsScopedList]]: + for page in self.pages: + yield from page.items.items() + + def get(self, key: str) -> Optional[compute.BackendBucketsScopedList]: + return self._response.items.get(key) + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListPager: """A pager for iterating through ``list`` requests. @@ -115,3 +194,79 @@ def __iter__(self) -> Iterator[compute.BackendBucket]: def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsablePager: + """A pager for iterating through ``list_usable`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendBucketListUsable` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsable`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.BackendBucketListUsable` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., compute.BackendBucketListUsable], + request: compute.ListUsableBackendBucketsRequest, + response: compute.BackendBucketListUsable, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListUsableBackendBucketsRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendBucketListUsable): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = compute.ListUsableBackendBucketsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendBucketListUsable]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[compute.BackendBucket]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/base.py index e20ca3c3e145..0728af9b29ff 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/base.py @@ -152,6 +152,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.aggregated_list: gapic_v1.method.wrap_method( + self.aggregated_list, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), self.delete: gapic_v1.method.wrap_method( self.delete, default_timeout=600.0, @@ -212,6 +227,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.list_usable: 
gapic_v1.method.wrap_method( + self.list_usable, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), self.patch: gapic_v1.method.wrap_method( self.patch, default_timeout=600.0, @@ -257,6 +287,18 @@ def add_signed_url_key( ]: raise NotImplementedError() + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListBackendBucketsRequest], + Union[ + compute.BackendBucketAggregatedList, + Awaitable[compute.BackendBucketAggregatedList], + ], + ]: + raise NotImplementedError() + @property def delete( self, @@ -311,6 +353,17 @@ def list( ]: raise NotImplementedError() + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableBackendBucketsRequest], + Union[ + compute.BackendBucketListUsable, Awaitable[compute.BackendBucketListUsable] + ], + ]: + raise NotImplementedError() + @property def patch( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py index b29b4f128c92..b8737d1b1b67 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest.py @@ -80,6 +80,14 @@ def post_add_signed_url_key(self, response): logging.log(f"Received response: {response}") return response + def pre_aggregated_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_aggregated_list(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete(self, request, metadata): logging.log(f"Received request: {request}") return request, 
metadata @@ -128,6 +136,14 @@ def post_list(self, response): logging.log(f"Received response: {response}") return response + def pre_list_usable(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable(self, response): + logging.log(f"Received response: {response}") + return response + def pre_patch(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -221,6 +237,57 @@ def post_add_signed_url_key_with_metadata( """ return response, metadata + def pre_aggregated_list( + self, + request: compute.AggregatedListBackendBucketsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.AggregatedListBackendBucketsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for aggregated_list + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_aggregated_list( + self, response: compute.BackendBucketAggregatedList + ) -> compute.BackendBucketAggregatedList: + """Post-rpc interceptor for aggregated_list + + DEPRECATED. Please use the `post_aggregated_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. This `post_aggregated_list` interceptor runs + before the `post_aggregated_list_with_metadata` interceptor. 
+ """ + return response + + def post_aggregated_list_with_metadata( + self, + response: compute.BackendBucketAggregatedList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.BackendBucketAggregatedList, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for aggregated_list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_aggregated_list_with_metadata` + interceptor in new development instead of the `post_aggregated_list` interceptor. + When both interceptors are used, this `post_aggregated_list_with_metadata` interceptor runs after the + `post_aggregated_list` interceptor. The (possibly modified) response returned by + `post_aggregated_list` will be passed to + `post_aggregated_list_with_metadata`. + """ + return response, metadata + def pre_delete( self, request: compute.DeleteBackendBucketRequest, @@ -503,6 +570,56 @@ def post_list_with_metadata( """ return response, metadata + def pre_list_usable( + self, + request: compute.ListUsableBackendBucketsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListUsableBackendBucketsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_usable + + Override in a subclass to manipulate the request or metadata + before they are sent to the BackendBuckets server. + """ + return request, metadata + + def post_list_usable( + self, response: compute.BackendBucketListUsable + ) -> compute.BackendBucketListUsable: + """Post-rpc interceptor for list_usable + + DEPRECATED. Please use the `post_list_usable_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the BackendBuckets server but before + it is returned to user code. 
This `post_list_usable` interceptor runs + before the `post_list_usable_with_metadata` interceptor. + """ + return response + + def post_list_usable_with_metadata( + self, + response: compute.BackendBucketListUsable, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.BackendBucketListUsable, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_usable + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the BackendBuckets server but before it is returned to user code. + + We recommend only using this `post_list_usable_with_metadata` + interceptor in new development instead of the `post_list_usable` interceptor. + When both interceptors are used, this `post_list_usable_with_metadata` interceptor runs after the + `post_list_usable` interceptor. The (possibly modified) response returned by + `post_list_usable` will be passed to + `post_list_usable_with_metadata`. + """ + return response, metadata + def pre_patch( self, request: compute.PatchBackendBucketRequest, @@ -1020,6 +1137,154 @@ def __call__( ) return resp + class _AggregatedList( + _BaseBackendBucketsRestTransport._BaseAggregatedList, BackendBucketsRestStub + ): + def __hash__(self): + return hash("BackendBucketsRestTransport.AggregatedList") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.AggregatedListBackendBucketsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.BackendBucketAggregatedList: + r"""Call the aggregated list method over HTTP. + + Args: + request (~.compute.AggregatedListBackendBucketsRequest): + The request object. A request message for + BackendBuckets.AggregatedList. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.BackendBucketAggregatedList: + + """ + + http_options = ( + _BaseBackendBucketsRestTransport._BaseAggregatedList._get_http_options() + ) + + request, metadata = self._interceptor.pre_aggregated_list(request, metadata) + transcoded_request = _BaseBackendBucketsRestTransport._BaseAggregatedList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseBackendBucketsRestTransport._BaseAggregatedList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.BackendBucketsClient.AggregatedList", + extra={ + "serviceName": "google.cloud.compute.v1.BackendBuckets", + "rpcName": "AggregatedList", + "httpRequest": http_request, + "metadata": 
http_request["headers"], + }, + ) + + # Send the request + response = BackendBucketsRestTransport._AggregatedList._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendBucketAggregatedList() + pb_resp = compute.BackendBucketAggregatedList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_aggregated_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_aggregated_list_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.BackendBucketAggregatedList.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.BackendBucketsClient.aggregated_list", + extra={ + "serviceName": "google.cloud.compute.v1.BackendBuckets", + "rpcName": "AggregatedList", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _Delete(_BaseBackendBucketsRestTransport._BaseDelete, BackendBucketsRestStub): def __hash__(self): return hash("BackendBucketsRestTransport.Delete") @@ -2070,6 +2335,154 @@ def __call__( ) return resp + class _ListUsable( + _BaseBackendBucketsRestTransport._BaseListUsable, BackendBucketsRestStub + ): + def __hash__(self): + return hash("BackendBucketsRestTransport.ListUsable") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + 
transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.ListUsableBackendBucketsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.BackendBucketListUsable: + r"""Call the list usable method over HTTP. + + Args: + request (~.compute.ListUsableBackendBucketsRequest): + The request object. A request message for + BackendBuckets.ListUsable. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.compute.BackendBucketListUsable: + + """ + + http_options = ( + _BaseBackendBucketsRestTransport._BaseListUsable._get_http_options() + ) + + request, metadata = self._interceptor.pre_list_usable(request, metadata) + transcoded_request = _BaseBackendBucketsRestTransport._BaseListUsable._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseBackendBucketsRestTransport._BaseListUsable._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.BackendBucketsClient.ListUsable", + extra={ + "serviceName": "google.cloud.compute.v1.BackendBuckets", + "rpcName": "ListUsable", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = BackendBucketsRestTransport._ListUsable._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendBucketListUsable() + pb_resp = compute.BackendBucketListUsable.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_usable(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_usable_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.BackendBucketListUsable.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.BackendBucketsClient.list_usable", + extra={ + "serviceName": "google.cloud.compute.v1.BackendBuckets", + "rpcName": "ListUsable", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _Patch(_BaseBackendBucketsRestTransport._BasePatch, BackendBucketsRestStub): def __hash__(self): return hash("BackendBucketsRestTransport.Patch") @@ -3001,6 +3414,17 @@ def add_signed_url_key( # In C++ this would require a dynamic_cast return self._AddSignedUrlKey(self._session, self._host, self._interceptor) # type: ignore + @property + def aggregated_list( + self, + ) -> Callable[ + [compute.AggregatedListBackendBucketsRequest], + compute.BackendBucketAggregatedList, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._AggregatedList(self._session, self._host, self._interceptor) # type: ignore + @property def delete( self, @@ -3047,6 +3471,16 @@ def list( # In C++ this would require a dynamic_cast return self._List(self._session, self._host, self._interceptor) # type: ignore + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableBackendBucketsRequest], compute.BackendBucketListUsable + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListUsable(self._session, self._host, self._interceptor) # type: ignore + @property def patch(self) -> Callable[[compute.PatchBackendBucketRequest], compute.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest_base.py index 93ec3a9e5722..9fbcccae25e0 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest_base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/backend_buckets/transports/rest_base.py @@ -143,6 +143,52 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseAggregatedList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/aggregated/backendBuckets", 
+ }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.AggregatedListBackendBucketsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseBackendBucketsRestTransport._BaseAggregatedList._get_unset_required_fields( + query_params + ) + ) + + return query_params + class _BaseDelete: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -431,6 +477,52 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseListUsable: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/global/backendBuckets/listUsable", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.ListUsableBackendBucketsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseBackendBucketsRestTransport._BaseListUsable._get_unset_required_fields( + query_params + ) + ) + + 
    def update_kms_key_unary(
        self,
        request: Optional[Union[compute.UpdateKmsKeyDiskRequest, dict]] = None,
        *,
        project: Optional[str] = None,
        zone: Optional[str] = None,
        disk: Optional[str] = None,
        disk_update_kms_key_request_resource: Optional[
            compute.DiskUpdateKmsKeyRequest
        ] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> compute.Operation:
        r"""Rotates the customer-managed
        encryption key to the latest version for the specified
        persistent disk.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_update_kms_key():
                # Create a client
                client = compute_v1.DisksClient()

                # Initialize request argument(s)
                request = compute_v1.UpdateKmsKeyDiskRequest(
                    disk="disk_value",
                    project="project_value",
                    zone="zone_value",
                )

                # Make the request
                response = client.update_kms_key(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.UpdateKmsKeyDiskRequest, dict]):
                The request object. A request message for
                Disks.UpdateKmsKey. See the method
                description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            zone (str):
                The name of the zone for this
                request.

                This corresponds to the ``zone`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            disk (str):
                Name of the Disk resource, should
                conform to RFC1035.

                This corresponds to the ``disk`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            disk_update_kms_key_request_resource (google.cloud.compute_v1.types.DiskUpdateKmsKeyRequest):
                The body resource for this request
                This corresponds to the ``disk_update_kms_key_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # NOTE(review): per the return annotation, this unary variant returns
        # the raw compute.Operation, not the ExtendedOperation wrapper the
        # generated docstring describes — confirm against the non-unary
        # ``update_kms_key`` sibling.
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        flattened_params = [project, zone, disk, disk_update_kms_key_request_resource]
        has_flattened_params = (
            len([param for param in flattened_params if param is not None]) > 0
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(request, compute.UpdateKmsKeyDiskRequest):
            request = compute.UpdateKmsKeyDiskRequest(request)
            # If we have keyword arguments corresponding to fields on the
            # request, apply these.
            if project is not None:
                request.project = project
            if zone is not None:
                request.zone = zone
            if disk is not None:
                request.disk = disk
            if disk_update_kms_key_request_resource is not None:
                request.disk_update_kms_key_request_resource = (
                    disk_update_kms_key_request_resource
                )

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.update_kms_key]

        # Certain fields should be provided within the metadata header;
        # add these here.
        # Routing header so the backend can dispatch by project/zone/disk.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (
                    ("project", request.project),
                    ("zone", request.zone),
                    ("disk", request.disk),
                )
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
+ zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the Disk resource, should + conform to RFC1035. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk_update_kms_key_request_resource (google.cloud.compute_v1.types.DiskUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``disk_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, disk, disk_update_kms_key_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, compute.UpdateKmsKeyDiskRequest): + request = compute.UpdateKmsKeyDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if disk is not None: + request.disk = disk + if disk_update_kms_key_request_resource is not None: + request.disk_update_kms_key_request_resource = ( + disk_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("disk", request.disk), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def __enter__(self) -> "DisksClient": return self diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/base.py index ccde43c9a307..93196c4b6299 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/base.py @@ -282,6 +282,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_kms_key: gapic_v1.method.wrap_method( + self.update_kms_key, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -464,6 +469,15 @@ def update( ]: raise NotImplementedError() + @property + def update_kms_key( + self, + ) -> Callable[ + [compute.UpdateKmsKeyDiskRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py index f303b6be791b..fbc525a0c559 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest.py @@ -224,6 +224,14 @@ def post_update(self, response): logging.log(f"Received response: {response}") return response + def 
pre_update_kms_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_kms_key(self, response): + logging.log(f"Received response: {response}") + return response + transport = DisksRestTransport(interceptor=MyCustomDisksInterceptor()) client = DisksClient(transport=transport) @@ -1105,6 +1113,52 @@ def post_update_with_metadata( """ return response, metadata + def pre_update_kms_key( + self, + request: compute.UpdateKmsKeyDiskRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateKmsKeyDiskRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_kms_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the Disks server. + """ + return request, metadata + + def post_update_kms_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_kms_key + + DEPRECATED. Please use the `post_update_kms_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Disks server but before + it is returned to user code. This `post_update_kms_key` interceptor runs + before the `post_update_kms_key_with_metadata` interceptor. + """ + return response + + def post_update_kms_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_kms_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Disks server but before it is returned to user code. + + We recommend only using this `post_update_kms_key_with_metadata` + interceptor in new development instead of the `post_update_kms_key` interceptor. 
+ When both interceptors are used, this `post_update_kms_key_with_metadata` interceptor runs after the + `post_update_kms_key` interceptor. The (possibly modified) response returned by + `post_update_kms_key` will be passed to + `post_update_kms_key_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class DisksRestStub: @@ -4590,6 +4644,186 @@ def __call__( ) return resp + class _UpdateKmsKey(_BaseDisksRestTransport._BaseUpdateKmsKey, DisksRestStub): + def __hash__(self): + return hash("DisksRestTransport.UpdateKmsKey") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateKmsKeyDiskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update kms key method over HTTP. + + Args: + request (~.compute.UpdateKmsKeyDiskRequest): + The request object. A request message for + Disks.UpdateKmsKey. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. + + """ + + http_options = _BaseDisksRestTransport._BaseUpdateKmsKey._get_http_options() + + request, metadata = self._interceptor.pre_update_kms_key(request, metadata) + transcoded_request = ( + _BaseDisksRestTransport._BaseUpdateKmsKey._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseDisksRestTransport._BaseUpdateKmsKey._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseDisksRestTransport._BaseUpdateKmsKey._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.DisksClient.UpdateKmsKey", + extra={ + "serviceName": "google.cloud.compute.v1.Disks", + "rpcName": "UpdateKmsKey", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the 
request + response = DisksRestTransport._UpdateKmsKey._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_kms_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_kms_key_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.DisksClient.update_kms_key", + extra={ + "serviceName": "google.cloud.compute.v1.Disks", + "rpcName": "UpdateKmsKey", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def add_resource_policies( self, @@ -4734,6 +4968,14 @@ def update(self) -> Callable[[compute.UpdateDiskRequest], compute.Operation]: # In C++ this would require a dynamic_cast return self._Update(self._session, self._host, self._interceptor) # type: ignore + @property + def update_kms_key( + self, + ) -> Callable[[compute.UpdateKmsKeyDiskRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateKmsKey(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest_base.py index 3992de014cc3..c9909039d27d 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest_base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/disks/transports/rest_base.py @@ -1091,5 +1091,61 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseUpdateKmsKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/updateKmsKey", + "body": "disk_update_kms_key_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateKmsKeyDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) 
+ query_params.update( + _BaseDisksRestTransport._BaseUpdateKmsKey._get_unset_required_fields( + query_params + ) + ) + + return query_params + __all__ = ("_BaseDisksRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/__init__.py new file mode 100644 index 000000000000..10ee23d98a6b --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import InstantSnapshotGroupsClient + +__all__ = ("InstantSnapshotGroupsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/client.py new file mode 100644 index 000000000000..1e573e2f7331 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/client.py @@ -0,0 +1,2099 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import functools +import json +import logging as std_logging +import os +import re +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.api_core.extended_operation as extended_operation # type: ignore + +from google.cloud.compute_v1.services.instant_snapshot_groups import pagers +from 
google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, InstantSnapshotGroupsTransport +from .transports.rest import InstantSnapshotGroupsRestTransport + + +class InstantSnapshotGroupsClientMeta(type): + """Metaclass for the InstantSnapshotGroups client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[InstantSnapshotGroupsTransport]] + _transport_registry["rest"] = InstantSnapshotGroupsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[InstantSnapshotGroupsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class InstantSnapshotGroupsClient(metaclass=InstantSnapshotGroupsClientMeta): + """The InstantSnapshotGroups API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. 
+ return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstantSnapshotGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + InstantSnapshotGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> InstantSnapshotGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + InstantSnapshotGroupsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = InstantSnapshotGroupsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = InstantSnapshotGroupsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = InstantSnapshotGroupsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = InstantSnapshotGroupsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + InstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = InstantSnapshotGroupsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + InstantSnapshotGroupsTransport, + Callable[..., InstantSnapshotGroupsTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the instant snapshot groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,InstantSnapshotGroupsTransport,Callable[..., InstantSnapshotGroupsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the InstantSnapshotGroupsTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + InstantSnapshotGroupsClient._read_environment_variables() + ) + self._client_cert_source = InstantSnapshotGroupsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = InstantSnapshotGroupsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ transport_provided = isinstance(transport, InstantSnapshotGroupsTransport) + if transport_provided: + # transport is a InstantSnapshotGroupsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." + ) + self._transport = cast(InstantSnapshotGroupsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or InstantSnapshotGroupsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[InstantSnapshotGroupsTransport], + Callable[..., InstantSnapshotGroupsTransport], + ] = ( + InstantSnapshotGroupsClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., InstantSnapshotGroupsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + 
_LOGGER.debug( + "Created client `google.cloud.compute_v1.InstantSnapshotGroupsClient`.", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "credentialsType": None, + }, + ) + + def delete_unary( + self, + request: Optional[ + Union[compute.DeleteInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instant_snapshot_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""deletes a Zonal InstantSnapshotGroup resource + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstantSnapshotGroupRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instant_snapshot_group (str): + Name of the InstantSnapshot resource + to delete. + + This corresponds to the ``instant_snapshot_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, instant_snapshot_group] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteInstantSnapshotGroupRequest): + request = compute.DeleteInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instant_snapshot_group is not None: + request.instant_snapshot_group = instant_snapshot_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instant_snapshot_group", request.instant_snapshot_group), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete( + self, + request: Optional[ + Union[compute.DeleteInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instant_snapshot_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""deletes a Zonal InstantSnapshotGroup resource + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteInstantSnapshotGroupRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instant_snapshot_group (str): + Name of the InstantSnapshot resource + to delete. + + This corresponds to the ``instant_snapshot_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, instant_snapshot_group] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteInstantSnapshotGroupRequest): + request = compute.DeleteInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instant_snapshot_group is not None: + request.instant_snapshot_group = instant_snapshot_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instant_snapshot_group", request.instant_snapshot_group), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get( + self, + request: Optional[Union[compute.GetInstantSnapshotGroupRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instant_snapshot_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.InstantSnapshotGroup: + r"""returns the specified InstantSnapshotGroup resource + in the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetInstantSnapshotGroupRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instant_snapshot_group (str): + Name of the InstantSnapshotGroup + resource to return. + + This corresponds to the ``instant_snapshot_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.InstantSnapshotGroup: + Represents an InstantSnapshotGroup + resource. + An instant snapshot group is a set of + instant snapshots that represents a + point in time state of a consistency + group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, instant_snapshot_group] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetInstantSnapshotGroupRequest): + request = compute.GetInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instant_snapshot_group is not None: + request.instant_snapshot_group = instant_snapshot_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("instant_snapshot_group", request.instant_snapshot_group), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[ + Union[compute.GetIamPolicyInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyInstantSnapshotGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyInstantSnapshotGroupRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.GetIamPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: 
BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetIamPolicyInstantSnapshotGroupRequest): + request = compute.GetIamPolicyInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary( + self, + request: Optional[ + Union[compute.InsertInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instant_snapshot_group_resource: Optional[compute.InstantSnapshotGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""inserts a Zonal InstantSnapshotGroup resource + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstantSnapshotGroupRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstantSnapshotGroupRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instant_snapshot_group_resource (google.cloud.compute_v1.types.InstantSnapshotGroup): + The body resource for this request + This corresponds to the ``instant_snapshot_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, instant_snapshot_group_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertInstantSnapshotGroupRequest): + request = compute.InsertInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instant_snapshot_group_resource is not None: + request.instant_snapshot_group_resource = ( + instant_snapshot_group_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert( + self, + request: Optional[ + Union[compute.InsertInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + instant_snapshot_group_resource: Optional[compute.InstantSnapshotGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""inserts a Zonal InstantSnapshotGroup resource + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertInstantSnapshotGroupRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertInstantSnapshotGroupRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instant_snapshot_group_resource (google.cloud.compute_v1.types.InstantSnapshotGroup): + The body resource for this request + This corresponds to the ``instant_snapshot_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, instant_snapshot_group_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertInstantSnapshotGroupRequest): + request = compute.InsertInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if instant_snapshot_group_resource is not None: + request.instant_snapshot_group_resource = ( + instant_snapshot_group_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list( + self, + request: Optional[Union[compute.ListInstantSnapshotGroupsRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPager: + r"""retrieves the list of InstantSnapshotGroup resources + contained within the specified zone. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListInstantSnapshotGroupsRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListInstantSnapshotGroupsRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.services.instant_snapshot_groups.pagers.ListPager: + Contains a list of + InstantSnapshotGroup resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListInstantSnapshotGroupsRequest): + request = compute.ListInstantSnapshotGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_iam_policy( + self, + request: Optional[ + Union[compute.SetIamPolicyInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + zone_set_policy_request_resource: Optional[compute.ZoneSetPolicyRequest] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyInstantSnapshotGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyInstantSnapshotGroupRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.SetIamPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. 
+ + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + This corresponds to the ``zone_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, resource, zone_set_policy_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, compute.SetIamPolicyInstantSnapshotGroupRequest): + request = compute.SetIamPolicyInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if zone_set_policy_request_resource is not None: + request.zone_set_policy_request_resource = ( + zone_set_policy_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[ + Union[compute.TestIamPermissionsInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[ + compute.TestPermissionsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsInstantSnapshotGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsInstantSnapshotGroupRequest, dict]): + The request object. A request message for + InstantSnapshotGroups.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + The name of the zone for this + request. + + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, resource, test_permissions_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.TestIamPermissionsInstantSnapshotGroupRequest + ): + request = compute.TestIamPermissionsInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = ( + test_permissions_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "InstantSnapshotGroupsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("InstantSnapshotGroupsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/pagers.py new file mode 100644 index 000000000000..1b22892761dc --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/pagers.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ListInstantSnapshotGroups` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ListInstantSnapshotGroups` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., compute.ListInstantSnapshotGroups], + request: compute.ListInstantSnapshotGroupsRequest, + response: compute.ListInstantSnapshotGroups, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListInstantSnapshotGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.ListInstantSnapshotGroups): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = compute.ListInstantSnapshotGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ListInstantSnapshotGroups]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[compute.InstantSnapshotGroup]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/README.rst b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/README.rst new file mode 100644 index 000000000000..d02edc26ae2d --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/README.rst @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + +``InstantSnapshotGroupsTransport`` is the ABC for all transports. + +- public child ``InstantSnapshotGroupsGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). +- public child ``InstantSnapshotGroupsGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseInstantSnapshotGroupsRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). 
+- public child ``InstantSnapshotGroupsRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/__init__.py new file mode 100644 index 000000000000..f8e754e3c449 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import InstantSnapshotGroupsTransport +from .rest import ( + InstantSnapshotGroupsRestInterceptor, + InstantSnapshotGroupsRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[InstantSnapshotGroupsTransport]] +_transport_registry["rest"] = InstantSnapshotGroupsRestTransport + +__all__ = ( + "InstantSnapshotGroupsTransport", + "InstantSnapshotGroupsRestTransport", + "InstantSnapshotGroupsRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/base.py new file mode 100644 index 000000000000..9bb6b02a6221 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/base.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +import google.auth # type: ignore +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version +from google.cloud.compute_v1.services import zone_operations +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class InstantSnapshotGroupsTransport(abc.ABC): + """Abstract transport class for InstantSnapshotGroups.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "compute.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + self._extended_operations_services: Dict[str, Any] = {} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + self._wrapped_methods: Dict[Callable, Callable] = {} + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=600.0, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=600.0, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=600.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteInstantSnapshotGroupRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetInstantSnapshotGroupRequest], + Union[compute.InstantSnapshotGroup, Awaitable[compute.InstantSnapshotGroup]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [compute.GetIamPolicyInstantSnapshotGroupRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertInstantSnapshotGroupRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def list( + self, + ) -> Callable[ + [compute.ListInstantSnapshotGroupsRequest], + Union[ + compute.ListInstantSnapshotGroups, + Awaitable[compute.ListInstantSnapshotGroups], + ], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [compute.SetIamPolicyInstantSnapshotGroupRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsInstantSnapshotGroupRequest], + Union[ + compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse] + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient: + ex_op_service = self._extended_operations_services.get("zone_operations") + if not ex_op_service: + ex_op_service = zone_operations.ZoneOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["zone_operations"] = ex_op_service + + return ex_op_service + + 
+__all__ = ("InstantSnapshotGroupsTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/rest.py new file mode 100644 index 000000000000..997e582d549a --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/rest.py @@ -0,0 +1,1899 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseInstantSnapshotGroupsRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class InstantSnapshotGroupsRestInterceptor: + """Interceptor for InstantSnapshotGroups. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the InstantSnapshotGroupsRestTransport. + + .. code-block:: python + class MyCustomInstantSnapshotGroupsInterceptor(InstantSnapshotGroupsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") 
+ return response + + transport = InstantSnapshotGroupsRestTransport(interceptor=MyCustomInstantSnapshotGroupsInterceptor()) + client = InstantSnapshotGroupsClient(transport=transport) + + + """ + + def pre_delete( + self, + request: compute.DeleteInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstantSnapshotGroups server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstantSnapshotGroups server but before + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. + """ + return response + + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. 
+ """ + return response, metadata + + def pre_get( + self, + request: compute.GetInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetInstantSnapshotGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstantSnapshotGroups server. + """ + return request, metadata + + def post_get( + self, response: compute.InstantSnapshotGroup + ) -> compute.InstantSnapshotGroup: + """Post-rpc interceptor for get + + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstantSnapshotGroups server but before + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. + """ + return response + + def post_get_with_metadata( + self, + response: compute.InstantSnapshotGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstantSnapshotGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + + def pre_get_iam_policy( + self, + request: compute.GetIamPolicyInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetIamPolicyInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstantSnapshotGroups server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstantSnapshotGroups server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_insert( + self, + request: compute.InsertInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InsertInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstantSnapshotGroups server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstantSnapshotGroups server but before + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. + """ + return response + + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + + def pre_list( + self, + request: compute.ListInstantSnapshotGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListInstantSnapshotGroupsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstantSnapshotGroups server. + """ + return request, metadata + + def post_list( + self, response: compute.ListInstantSnapshotGroups + ) -> compute.ListInstantSnapshotGroups: + """Post-rpc interceptor for list + + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstantSnapshotGroups server but before + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. + """ + return response + + def post_list_with_metadata( + self, + response: compute.ListInstantSnapshotGroups, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListInstantSnapshotGroups, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + + def pre_set_iam_policy( + self, + request: compute.SetIamPolicyInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SetIamPolicyInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstantSnapshotGroups server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstantSnapshotGroups server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_test_iam_permissions( + self, + request: compute.TestIamPermissionsInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestIamPermissionsInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the InstantSnapshotGroups server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: compute.TestPermissionsResponse + ) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the InstantSnapshotGroups server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the InstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. 
The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + + +@dataclasses.dataclass +class InstantSnapshotGroupsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: InstantSnapshotGroupsRestInterceptor + + +class InstantSnapshotGroupsRestTransport(_BaseInstantSnapshotGroupsRestTransport): + """REST backend synchronous transport for InstantSnapshotGroups. + + The InstantSnapshotGroups API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[InstantSnapshotGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[InstantSnapshotGroupsRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or InstantSnapshotGroupsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete( + _BaseInstantSnapshotGroupsRestTransport._BaseDelete, + InstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("InstantSnapshotGroupsRestTransport.Delete") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.DeleteInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteInstantSnapshotGroupRequest): + The request object. A request message for + InstantSnapshotGroups.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. + + """ + + http_options = ( + _BaseInstantSnapshotGroupsRestTransport._BaseDelete._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete(request, metadata) + transcoded_request = _BaseInstantSnapshotGroupsRestTransport._BaseDelete._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInstantSnapshotGroupsRestTransport._BaseDelete._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.compute_v1.InstantSnapshotGroupsClient.Delete", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "Delete", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstantSnapshotGroupsRestTransport._Delete._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.InstantSnapshotGroupsClient.delete", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "Delete", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Get( + _BaseInstantSnapshotGroupsRestTransport._BaseGet, InstantSnapshotGroupsRestStub + ): + def __hash__(self): + return hash("InstantSnapshotGroupsRestTransport.Get") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = 
transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.InstantSnapshotGroup: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetInstantSnapshotGroupRequest): + The request object. A request message for + InstantSnapshotGroups.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.InstantSnapshotGroup: + Represents an InstantSnapshotGroup + resource. + An instant snapshot group is a set of + instant snapshots that represents a + point in time state of a consistency + group. 
+ + """ + + http_options = ( + _BaseInstantSnapshotGroupsRestTransport._BaseGet._get_http_options() + ) + + request, metadata = self._interceptor.pre_get(request, metadata) + transcoded_request = _BaseInstantSnapshotGroupsRestTransport._BaseGet._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseInstantSnapshotGroupsRestTransport._BaseGet._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.InstantSnapshotGroupsClient.Get", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "Get", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstantSnapshotGroupsRestTransport._Get._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstantSnapshotGroup() + pb_resp = compute.InstantSnapshotGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.InstantSnapshotGroup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.InstantSnapshotGroupsClient.get", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "Get", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetIamPolicy( + _BaseInstantSnapshotGroupsRestTransport._BaseGetIamPolicy, + InstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("InstantSnapshotGroupsRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetIamPolicyInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyInstantSnapshotGroupRequest): + The request object. A request message for + InstantSnapshotGroups.GetIamPolicy. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options = _BaseInstantSnapshotGroupsRestTransport._BaseGetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseInstantSnapshotGroupsRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInstantSnapshotGroupsRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.InstantSnapshotGroupsClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstantSnapshotGroupsRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Policy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.InstantSnapshotGroupsClient.get_iam_policy", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Insert( + _BaseInstantSnapshotGroupsRestTransport._BaseInsert, + InstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("InstantSnapshotGroupsRestTransport.Insert") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.InsertInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertInstantSnapshotGroupRequest): + The request object. A request message for + InstantSnapshotGroups.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseInstantSnapshotGroupsRestTransport._BaseInsert._get_http_options() + ) + + request, metadata = self._interceptor.pre_insert(request, metadata) + transcoded_request = _BaseInstantSnapshotGroupsRestTransport._BaseInsert._get_transcoded_request( + http_options, request + ) + + body = _BaseInstantSnapshotGroupsRestTransport._BaseInsert._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInstantSnapshotGroupsRestTransport._BaseInsert._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.InstantSnapshotGroupsClient.Insert", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "Insert", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstantSnapshotGroupsRestTransport._Insert._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.InstantSnapshotGroupsClient.insert", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "Insert", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _List( + _BaseInstantSnapshotGroupsRestTransport._BaseList, InstantSnapshotGroupsRestStub + ): + def __hash__(self): + return hash("InstantSnapshotGroupsRestTransport.List") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.ListInstantSnapshotGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) 
-> compute.ListInstantSnapshotGroups: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListInstantSnapshotGroupsRequest): + The request object. A request message for + InstantSnapshotGroups.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.ListInstantSnapshotGroups: + Contains a list of + InstantSnapshotGroup resources. + + """ + + http_options = ( + _BaseInstantSnapshotGroupsRestTransport._BaseList._get_http_options() + ) + + request, metadata = self._interceptor.pre_list(request, metadata) + transcoded_request = _BaseInstantSnapshotGroupsRestTransport._BaseList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseInstantSnapshotGroupsRestTransport._BaseList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.InstantSnapshotGroupsClient.List", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "List", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + 
response = InstantSnapshotGroupsRestTransport._List._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ListInstantSnapshotGroups() + pb_resp = compute.ListInstantSnapshotGroups.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.ListInstantSnapshotGroups.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.InstantSnapshotGroupsClient.list", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "List", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy( + _BaseInstantSnapshotGroupsRestTransport._BaseSetIamPolicy, + InstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("InstantSnapshotGroupsRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + 
headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.SetIamPolicyInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyInstantSnapshotGroupRequest): + The request object. A request message for + InstantSnapshotGroups.SetIamPolicy. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options = _BaseInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.InstantSnapshotGroupsClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = InstantSnapshotGroupsRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Policy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.InstantSnapshotGroupsClient.set_iam_policy", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions( + _BaseInstantSnapshotGroupsRestTransport._BaseTestIamPermissions, + InstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("InstantSnapshotGroupsRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.TestIamPermissionsInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + 
timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsInstantSnapshotGroupRequest): + The request object. A request message for + InstantSnapshotGroups.TestIamPermissions. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options = _BaseInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.compute_v1.InstantSnapshotGroupsClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + InstantSnapshotGroupsRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.TestPermissionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.InstantSnapshotGroupsClient.test_iam_permissions", + extra={ + "serviceName": "google.cloud.compute.v1.InstantSnapshotGroups", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def delete( + self, + ) -> Callable[[compute.DeleteInstantSnapshotGroupRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get( + self, + ) -> Callable[ + [compute.GetInstantSnapshotGroupRequest], compute.InstantSnapshotGroup + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyInstantSnapshotGroupRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert( + self, + ) -> Callable[[compute.InsertInstantSnapshotGroupRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list( + self, + ) -> Callable[ + [compute.ListInstantSnapshotGroupsRequest], compute.ListInstantSnapshotGroups + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyInstantSnapshotGroupRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsInstantSnapshotGroupRequest], + compute.TestPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("InstantSnapshotGroupsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/rest_base.py new file mode 100644 index 000000000000..2068cb82b7fd --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/instant_snapshot_groups/transports/rest_base.py @@ -0,0 +1,445 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.protobuf import json_format + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO, InstantSnapshotGroupsTransport + + +class _BaseInstantSnapshotGroupsRestTransport(InstantSnapshotGroupsTransport): + """Base REST backend transport for InstantSnapshotGroups. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. 
Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseDelete: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{instant_snapshot_group}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.DeleteInstantSnapshotGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseInstantSnapshotGroupsRestTransport._BaseDelete._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGet: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod 
+ def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{instant_snapshot_group}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetInstantSnapshotGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseInstantSnapshotGroupsRestTransport._BaseGet._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{resource}/getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetIamPolicyInstantSnapshotGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseInstantSnapshotGroupsRestTransport._BaseGetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseInsert: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups", + "body": "instant_snapshot_group_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.InsertInstantSnapshotGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseInstantSnapshotGroupsRestTransport._BaseInsert._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() 
+ if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.ListInstantSnapshotGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseInstantSnapshotGroupsRestTransport._BaseList._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{resource}/setIamPolicy", + "body": "zone_set_policy_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.SetIamPolicyInstantSnapshotGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def 
_get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.TestIamPermissionsInstantSnapshotGroupRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_unset_required_fields( + query_params + ) + ) + + return query_params + + +__all__ = ("_BaseInstantSnapshotGroupsRestTransport",) diff --git 
a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/__init__.py new file mode 100644 index 000000000000..2cea388ac68e --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionBackendBucketsClient + +__all__ = ("RegionBackendBucketsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/client.py new file mode 100644 index 000000000000..f90c7c06f6d3 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/client.py @@ -0,0 +1,2558 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import functools +import json +import logging as std_logging +import os +import re +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.api_core.extended_operation as extended_operation # type: ignore + +from google.cloud.compute_v1.services.region_backend_buckets import pagers +from google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, RegionBackendBucketsTransport +from .transports.rest import RegionBackendBucketsRestTransport + + +class RegionBackendBucketsClientMeta(type): + """Metaclass for the RegionBackendBuckets client. 
+ + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionBackendBucketsTransport]] + _transport_registry["rest"] = RegionBackendBucketsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RegionBackendBucketsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionBackendBucketsClient(metaclass=RegionBackendBucketsClientMeta): + """The RegionBackendBuckets API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. 
+ return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionBackendBucketsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionBackendBucketsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionBackendBucketsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionBackendBucketsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = RegionBackendBucketsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = RegionBackendBucketsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = RegionBackendBucketsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = RegionBackendBucketsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = RegionBackendBucketsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = RegionBackendBucketsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + RegionBackendBucketsTransport, + Callable[..., RegionBackendBucketsTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region backend buckets client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,RegionBackendBucketsTransport,Callable[..., RegionBackendBucketsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionBackendBucketsTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + RegionBackendBucketsClient._read_environment_variables() + ) + self._client_cert_source = RegionBackendBucketsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = RegionBackendBucketsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ transport_provided = isinstance(transport, RegionBackendBucketsTransport) + if transport_provided: + # transport is a RegionBackendBucketsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." + ) + self._transport = cast(RegionBackendBucketsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or RegionBackendBucketsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RegionBackendBucketsTransport], + Callable[..., RegionBackendBucketsTransport], + ] = ( + RegionBackendBucketsClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionBackendBucketsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + 
"Created client `google.cloud.compute_v1.RegionBackendBucketsClient`.", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "credentialsType": None, + }, + ) + + def delete_unary( + self, + request: Optional[Union[compute.DeleteRegionBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_bucket: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Deletes the specified regional BackendBucket + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionBackendBucketRequest, dict]): + The request object. A request message for + RegionBackendBuckets.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + delete. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, backend_bucket] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteRegionBackendBucketRequest): + request = compute.DeleteRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_bucket is not None: + request.backend_bucket = backend_bucket + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("backend_bucket", request.backend_bucket), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete( + self, + request: Optional[Union[compute.DeleteRegionBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_bucket: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified regional BackendBucket + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionBackendBucketRequest, dict]): + The request object. A request message for + RegionBackendBuckets.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ backend_bucket (str): + Name of the BackendBucket resource to + delete. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, backend_bucket] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteRegionBackendBucketRequest): + request = compute.DeleteRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_bucket is not None: + request.backend_bucket = backend_bucket + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("backend_bucket", request.backend_bucket), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get( + self, + request: Optional[Union[compute.GetRegionBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_bucket: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.BackendBucket: + r"""Returns the specified regional BackendBucket + resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionBackendBucketRequest, dict]): + The request object. A request message for + RegionBackendBuckets.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + return. 
+ + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.BackendBucket: + Represents a Cloud Storage Bucket + resource. + This Cloud Storage bucket resource is + referenced by a URL map of a load + balancer. For more information, + readBackend Buckets. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, backend_bucket] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetRegionBackendBucketRequest): + request = compute.GetRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_bucket is not None: + request.backend_bucket = backend_bucket + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("backend_bucket", request.backend_bucket), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[ + Union[compute.GetIamPolicyRegionBackendBucketRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionBackendBucketRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyRegionBackendBucketRequest, dict]): + The request object. A request message for + RegionBackendBuckets.GetIamPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: 
BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetIamPolicyRegionBackendBucketRequest): + request = compute.GetIamPolicyRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary( + self, + request: Optional[Union[compute.InsertRegionBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_bucket_resource: Optional[compute.BackendBucket] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Creates a RegionBackendBucket in the specified + project in the given scope using the parameters that are + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionBackendBucketRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionBackendBucketRequest, dict]): + The request object. A request message for + RegionBackendBuckets.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, backend_bucket_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertRegionBackendBucketRequest): + request = compute.InsertRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert( + self, + request: Optional[Union[compute.InsertRegionBackendBucketRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + backend_bucket_resource: Optional[compute.BackendBucket] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a RegionBackendBucket in the specified + project in the given scope using the parameters that are + included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionBackendBucketRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionBackendBucketRequest, dict]): + The request object. A request message for + RegionBackendBuckets.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, backend_bucket_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertRegionBackendBucketRequest): + request = compute.InsertRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list( + self, + request: Optional[Union[compute.ListRegionBackendBucketsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of BackendBucket resources + available to the specified project in the given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionBackendBucketsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionBackendBucketsRequest, dict]): + The request object. A request message for + RegionBackendBuckets.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region of this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.services.region_backend_buckets.pagers.ListPager: + Contains a list of BackendBucket + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListRegionBackendBucketsRequest): + request = compute.ListRegionBackendBucketsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_usable( + self, + request: Optional[ + Union[compute.ListUsableRegionBackendBucketsRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListUsablePager: + r"""Retrieves a list of all usable backend buckets in the + specified project in the given region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list_usable(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.ListUsableRegionBackendBucketsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list_usable(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListUsableRegionBackendBucketsRequest, dict]): + The request object. A request message for + RegionBackendBuckets.ListUsable. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. It must be a string that meets + the requirements in RFC1035. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.services.region_backend_buckets.pagers.ListUsablePager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListUsableRegionBackendBucketsRequest): + request = compute.ListUsableRegionBackendBucketsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_usable] + + # Certain fields should be provided within the metadata header; + # add these here. 
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata(
+                (
+                    ("project", request.project),
+                    ("region", request.region),
+                )
+            ),
+        )
+
+        # Validate the universe domain.
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListUsablePager(
+            method=rpc,
+            request=request,
+            response=response,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def patch_unary(
+        self,
+        request: Optional[Union[compute.PatchRegionBackendBucketRequest, dict]] = None,
+        *,
+        project: Optional[str] = None,
+        region: Optional[str] = None,
+        backend_bucket: Optional[str] = None,
+        backend_bucket_resource: Optional[compute.BackendBucket] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> compute.Operation:
+        r"""Updates the specified BackendBucket resource with
+        the data included in the request. This method
+        supports PATCH semantics and uses the JSON merge
+        patch format and processing rules.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionBackendBucketRequest( + backend_bucket="backend_bucket_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionBackendBucketRequest, dict]): + The request object. A request message for + RegionBackendBuckets.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + patch. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, backend_bucket, backend_bucket_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.PatchRegionBackendBucketRequest): + request = compute.PatchRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("backend_bucket", request.backend_bucket), + ) + ), + ) + + # Validate the universe domain. 
+        self._validate_universe_domain()
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def patch(
+        self,
+        request: Optional[Union[compute.PatchRegionBackendBucketRequest, dict]] = None,
+        *,
+        project: Optional[str] = None,
+        region: Optional[str] = None,
+        backend_bucket: Optional[str] = None,
+        backend_bucket_resource: Optional[compute.BackendBucket] = None,
+        retry: OptionalRetry = gapic_v1.method.DEFAULT,
+        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
+        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+    ) -> extended_operation.ExtendedOperation:
+        r"""Updates the specified BackendBucket resource with
+        the data included in the request. This method
+        supports PATCH semantics and uses the JSON merge
+        patch format and processing rules.
+
+        .. code-block:: python
+
+            # This snippet has been automatically generated and should be regarded as a
+            # code template only.
+            # It will require modifications to work:
+            # - It may require correct/in-range values for request initialization.
+            # - It may require specifying regional endpoints when creating the service
+            # client as shown in:
+            # https://googleapis.dev/python/google-api-core/latest/client_options.html
+            from google.cloud import compute_v1
+
+            def sample_patch():
+                # Create a client
+                client = compute_v1.RegionBackendBucketsClient()
+
+                # Initialize request argument(s)
+                request = compute_v1.PatchRegionBackendBucketRequest(
+                    backend_bucket="backend_bucket_value",
+                    project="project_value",
+                    region="region_value",
+                )
+
+                # Make the request
+                response = client.patch(request=request)
+
+                # Handle the response
+                print(response)
+
+        Args:
+            request (Union[google.cloud.compute_v1.types.PatchRegionBackendBucketRequest, dict]):
+                The request object. A request message for
+                RegionBackendBuckets.Patch. See the
+                method description for details.
+ project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket (str): + Name of the BackendBucket resource to + patch. + + This corresponds to the ``backend_bucket`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + This corresponds to the ``backend_bucket_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, backend_bucket, backend_bucket_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.PatchRegionBackendBucketRequest): + request = compute.PatchRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if backend_bucket is not None: + request.backend_bucket = backend_bucket + if backend_bucket_resource is not None: + request.backend_bucket_resource = backend_bucket_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("backend_bucket", request.backend_bucket), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: Optional[ + Union[compute.SetIamPolicyRegionBackendBucketRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[ + compute.RegionSetPolicyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionBackendBucketRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyRegionBackendBucketRequest, dict]): + The request object. 
A request message for + RegionBackendBuckets.SetIamPolicy. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. 
+ + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [ + project, + region, + resource, + region_set_policy_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.SetIamPolicyRegionBackendBucketRequest): + request = compute.SetIamPolicyRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = ( + region_set_policy_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def test_iam_permissions( + self, + request: Optional[ + Union[compute.TestIamPermissionsRegionBackendBucketRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[ + compute.TestPermissionsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionBackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionBackendBucketRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsRegionBackendBucketRequest, dict]): + The request object. A request message for + RegionBackendBuckets.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + resource, + test_permissions_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, compute.TestIamPermissionsRegionBackendBucketRequest + ): + request = compute.TestIamPermissionsRegionBackendBucketRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = ( + test_permissions_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionBackendBucketsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("RegionBackendBucketsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/pagers.py new file mode 100644 index 000000000000..01552ce718ea --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/pagers.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendBucketList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.BackendBucketList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.BackendBucketList], + request: compute.ListRegionBackendBucketsRequest, + response: compute.BackendBucketList, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionBackendBucketsRequest): + The initial request object. 
+ response (google.cloud.compute_v1.types.BackendBucketList): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = compute.ListRegionBackendBucketsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendBucketList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[compute.BackendBucket]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListUsablePager: + """A pager for iterating through ``list_usable`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.BackendBucketListUsable` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListUsable`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.BackendBucketListUsable` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.BackendBucketListUsable], + request: compute.ListUsableRegionBackendBucketsRequest, + response: compute.BackendBucketListUsable, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListUsableRegionBackendBucketsRequest): + The initial request object. + response (google.cloud.compute_v1.types.BackendBucketListUsable): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = compute.ListUsableRegionBackendBucketsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.BackendBucketListUsable]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[compute.BackendBucket]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/README.rst b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/README.rst new file mode 100644 index 000000000000..e7e3f32be99b --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/README.rst @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + +``RegionBackendBucketsTransport`` is the ABC for all transports. + +- public child ``RegionBackendBucketsGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). +- public child ``RegionBackendBucketsGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseRegionBackendBucketsRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). +- public child ``RegionBackendBucketsRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/__init__.py new file mode 100644 index 000000000000..afd708c12c80 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionBackendBucketsTransport +from .rest import RegionBackendBucketsRestInterceptor, RegionBackendBucketsRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionBackendBucketsTransport]] +_transport_registry["rest"] = RegionBackendBucketsRestTransport + +__all__ = ( + "RegionBackendBucketsTransport", + "RegionBackendBucketsRestTransport", + "RegionBackendBucketsRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/base.py new file mode 100644 index 000000000000..7ae78002cf28 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/base.py @@ -0,0 +1,348 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +import google.auth # type: ignore +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version +from google.cloud.compute_v1.services import region_operations +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionBackendBucketsTransport(abc.ABC): + """Abstract transport class for RegionBackendBuckets.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "compute.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + self._extended_operations_services: Dict[str, Any] = {} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + self._wrapped_methods: Dict[Callable, Callable] = {} + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=600.0, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=600.0, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.list_usable: gapic_v1.method.wrap_method( + self.list_usable, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=600.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=600.0, + 
client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteRegionBackendBucketRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionBackendBucketRequest], + Union[compute.BackendBucket, Awaitable[compute.BackendBucket]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [compute.GetIamPolicyRegionBackendBucketRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertRegionBackendBucketRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionBackendBucketsRequest], + Union[compute.BackendBucketList, Awaitable[compute.BackendBucketList]], + ]: + raise NotImplementedError() + + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableRegionBackendBucketsRequest], + Union[ + compute.BackendBucketListUsable, Awaitable[compute.BackendBucketListUsable] + ], + ]: + raise NotImplementedError() + + @property + def patch( + self, + ) -> Callable[ + [compute.PatchRegionBackendBucketRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [compute.SetIamPolicyRegionBackendBucketRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + 
[compute.TestIamPermissionsRegionBackendBucketRequest], + Union[ + compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse] + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ("RegionBackendBucketsTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/rest.py new file mode 100644 index 000000000000..251340f14811 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/rest.py @@ -0,0 +1,2346 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseRegionBackendBucketsRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionBackendBucketsRestInterceptor: + """Interceptor for RegionBackendBuckets. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionBackendBucketsRestTransport. + + .. code-block:: python + class MyCustomRegionBackendBucketsInterceptor(RegionBackendBucketsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_usable(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_usable(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionBackendBucketsRestTransport(interceptor=MyCustomRegionBackendBucketsInterceptor()) + client = RegionBackendBucketsClient(transport=transport) + + + """ + + def pre_delete( + self, + request: compute.DeleteRegionBackendBucketRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteRegionBackendBucketRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. + """ + return response + + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. 
+ + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + + def pre_get( + self, + request: compute.GetRegionBackendBucketRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetRegionBackendBucketRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_get(self, response: compute.BackendBucket) -> compute.BackendBucket: + """Post-rpc interceptor for get + + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. + """ + return response + + def post_get_with_metadata( + self, + response: compute.BackendBucket, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.BackendBucket, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. 
The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + + def pre_get_iam_policy( + self, + request: compute.GetIamPolicyRegionBackendBucketRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetIamPolicyRegionBackendBucketRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_insert( + self, + request: compute.InsertRegionBackendBucketRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InsertRegionBackendBucketRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. + """ + return response + + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + + def pre_list( + self, + request: compute.ListRegionBackendBucketsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListRegionBackendBucketsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_list( + self, response: compute.BackendBucketList + ) -> compute.BackendBucketList: + """Post-rpc interceptor for list + + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. + """ + return response + + def post_list_with_metadata( + self, + response: compute.BackendBucketList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.BackendBucketList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + + def pre_list_usable( + self, + request: compute.ListUsableRegionBackendBucketsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListUsableRegionBackendBucketsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_usable + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_list_usable( + self, response: compute.BackendBucketListUsable + ) -> compute.BackendBucketListUsable: + """Post-rpc interceptor for list_usable + + DEPRECATED. Please use the `post_list_usable_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_list_usable` interceptor runs + before the `post_list_usable_with_metadata` interceptor. + """ + return response + + def post_list_usable_with_metadata( + self, + response: compute.BackendBucketListUsable, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.BackendBucketListUsable, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_usable + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. + + We recommend only using this `post_list_usable_with_metadata` + interceptor in new development instead of the `post_list_usable` interceptor. + When both interceptors are used, this `post_list_usable_with_metadata` interceptor runs after the + `post_list_usable` interceptor. The (possibly modified) response returned by + `post_list_usable` will be passed to + `post_list_usable_with_metadata`. 
+ """ + return response, metadata + + def pre_patch( + self, + request: compute.PatchRegionBackendBucketRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.PatchRegionBackendBucketRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. + """ + return response + + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. 
+ """ + return response, metadata + + def pre_set_iam_policy( + self, + request: compute.SetIamPolicyRegionBackendBucketRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SetIamPolicyRegionBackendBucketRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_test_iam_permissions( + self, + request: compute.TestIamPermissionsRegionBackendBucketRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestIamPermissionsRegionBackendBucketRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionBackendBuckets server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: compute.TestPermissionsResponse + ) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionBackendBuckets server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionBackendBuckets server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. 
The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + + +@dataclasses.dataclass +class RegionBackendBucketsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionBackendBucketsRestInterceptor + + +class RegionBackendBucketsRestTransport(_BaseRegionBackendBucketsRestTransport): + """REST backend synchronous transport for RegionBackendBuckets. + + The RegionBackendBuckets API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RegionBackendBucketsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[RegionBackendBucketsRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionBackendBucketsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete( + _BaseRegionBackendBucketsRestTransport._BaseDelete, RegionBackendBucketsRestStub + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.Delete") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.DeleteRegionBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionBackendBucketRequest): + The request object. A request message for + RegionBackendBuckets.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. + + """ + + http_options = ( + _BaseRegionBackendBucketsRestTransport._BaseDelete._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete(request, metadata) + transcoded_request = _BaseRegionBackendBucketsRestTransport._BaseDelete._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionBackendBucketsRestTransport._BaseDelete._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.compute_v1.RegionBackendBucketsClient.Delete", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "Delete", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionBackendBucketsRestTransport._Delete._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.delete", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "Delete", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Get( + _BaseRegionBackendBucketsRestTransport._BaseGet, RegionBackendBucketsRestStub + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.Get") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = 
transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetRegionBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.BackendBucket: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionBackendBucketRequest): + The request object. A request message for + RegionBackendBuckets.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.BackendBucket: + Represents a Cloud Storage Bucket + resource. + This Cloud Storage bucket resource is + referenced by a URL map of a load + balancer. For more information, + readBackend Buckets. 
+ + """ + + http_options = ( + _BaseRegionBackendBucketsRestTransport._BaseGet._get_http_options() + ) + + request, metadata = self._interceptor.pre_get(request, metadata) + transcoded_request = ( + _BaseRegionBackendBucketsRestTransport._BaseGet._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseRegionBackendBucketsRestTransport._BaseGet._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionBackendBucketsClient.Get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "Get", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionBackendBucketsRestTransport._Get._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendBucket() + pb_resp = compute.BackendBucket.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.BackendBucket.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "Get", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetIamPolicy( + _BaseRegionBackendBucketsRestTransport._BaseGetIamPolicy, + RegionBackendBucketsRestStub, + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetIamPolicyRegionBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyRegionBackendBucketRequest): + The request object. A request message for + RegionBackendBuckets.GetIamPolicy. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options = _BaseRegionBackendBucketsRestTransport._BaseGetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseRegionBackendBucketsRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionBackendBucketsRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionBackendBucketsClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionBackendBucketsRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Policy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.get_iam_policy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Insert( + _BaseRegionBackendBucketsRestTransport._BaseInsert, RegionBackendBucketsRestStub + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.Insert") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.InsertRegionBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionBackendBucketRequest): + The request object. A request message for + RegionBackendBuckets.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseRegionBackendBucketsRestTransport._BaseInsert._get_http_options() + ) + + request, metadata = self._interceptor.pre_insert(request, metadata) + transcoded_request = _BaseRegionBackendBucketsRestTransport._BaseInsert._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionBackendBucketsRestTransport._BaseInsert._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionBackendBucketsRestTransport._BaseInsert._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionBackendBucketsClient.Insert", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "Insert", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionBackendBucketsRestTransport._Insert._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.insert", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "Insert", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _List( + _BaseRegionBackendBucketsRestTransport._BaseList, RegionBackendBucketsRestStub + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.List") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.ListRegionBackendBucketsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> 
compute.BackendBucketList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionBackendBucketsRequest): + The request object. A request message for + RegionBackendBuckets.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.BackendBucketList: + Contains a list of BackendBucket + resources. + + """ + + http_options = ( + _BaseRegionBackendBucketsRestTransport._BaseList._get_http_options() + ) + + request, metadata = self._interceptor.pre_list(request, metadata) + transcoded_request = _BaseRegionBackendBucketsRestTransport._BaseList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = ( + _BaseRegionBackendBucketsRestTransport._BaseList._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionBackendBucketsClient.List", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "List", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
RegionBackendBucketsRestTransport._List._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendBucketList() + pb_resp = compute.BackendBucketList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.BackendBucketList.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.list", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "List", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListUsable( + _BaseRegionBackendBucketsRestTransport._BaseListUsable, + RegionBackendBucketsRestStub, + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.ListUsable") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.ListUsableRegionBackendBucketsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.BackendBucketListUsable: + r"""Call the list usable method over HTTP. + + Args: + request (~.compute.ListUsableRegionBackendBucketsRequest): + The request object. A request message for + RegionBackendBuckets.ListUsable. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.compute.BackendBucketListUsable: + + """ + + http_options = _BaseRegionBackendBucketsRestTransport._BaseListUsable._get_http_options() + + request, metadata = self._interceptor.pre_list_usable(request, metadata) + transcoded_request = _BaseRegionBackendBucketsRestTransport._BaseListUsable._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionBackendBucketsRestTransport._BaseListUsable._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionBackendBucketsClient.ListUsable", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "ListUsable", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionBackendBucketsRestTransport._ListUsable._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.BackendBucketListUsable() + pb_resp = compute.BackendBucketListUsable.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_usable(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_usable_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.BackendBucketListUsable.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.list_usable", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "ListUsable", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Patch( + _BaseRegionBackendBucketsRestTransport._BasePatch, RegionBackendBucketsRestStub + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.Patch") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.PatchRegionBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] 
= None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionBackendBucketRequest): + The request object. A request message for + RegionBackendBuckets.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseRegionBackendBucketsRestTransport._BasePatch._get_http_options() + ) + + request, metadata = self._interceptor.pre_patch(request, metadata) + transcoded_request = _BaseRegionBackendBucketsRestTransport._BasePatch._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionBackendBucketsRestTransport._BasePatch._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionBackendBucketsRestTransport._BasePatch._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionBackendBucketsClient.Patch", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "Patch", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionBackendBucketsRestTransport._Patch._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.patch", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "Patch", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy( + _BaseRegionBackendBucketsRestTransport._BaseSetIamPolicy, + RegionBackendBucketsRestStub, + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.SetIamPolicyRegionBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyRegionBackendBucketRequest): + The request object. A request message for + RegionBackendBuckets.SetIamPolicy. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options = _BaseRegionBackendBucketsRestTransport._BaseSetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseRegionBackendBucketsRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionBackendBucketsRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionBackendBucketsRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionBackendBucketsClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionBackendBucketsRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Policy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.set_iam_policy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions( + _BaseRegionBackendBucketsRestTransport._BaseTestIamPermissions, + RegionBackendBucketsRestStub, + ): + def __hash__(self): + return hash("RegionBackendBucketsRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.TestIamPermissionsRegionBackendBucketRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsRegionBackendBucketRequest): + The request object. A request message for + RegionBackendBuckets.TestIamPermissions. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options = _BaseRegionBackendBucketsRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseRegionBackendBucketsRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionBackendBucketsRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionBackendBucketsRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.compute_v1.RegionBackendBucketsClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + RegionBackendBucketsRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.TestPermissionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionBackendBucketsClient.test_iam_permissions", + extra={ + "serviceName": "google.cloud.compute.v1.RegionBackendBuckets", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionBackendBucketRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get( + self, + ) -> Callable[[compute.GetRegionBackendBucketRequest], compute.BackendBucket]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyRegionBackendBucketRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionBackendBucketRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list( + self, + ) -> Callable[[compute.ListRegionBackendBucketsRequest], compute.BackendBucketList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_usable( + self, + ) -> Callable[ + [compute.ListUsableRegionBackendBucketsRequest], compute.BackendBucketListUsable + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListUsable(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch( + self, + ) -> Callable[[compute.PatchRegionBackendBucketRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyRegionBackendBucketRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsRegionBackendBucketRequest], + compute.TestPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RegionBackendBucketsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/rest_base.py new file mode 100644 index 000000000000..dd46784f8f32 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_backend_buckets/transports/rest_base.py @@ -0,0 +1,547 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.protobuf import json_format + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO, RegionBackendBucketsTransport + + +class _BaseRegionBackendBucketsRestTransport(RegionBackendBucketsTransport): + """Base REST backend transport for RegionBackendBuckets. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseDelete: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets/{backend_bucket}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.DeleteRegionBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionBackendBucketsRestTransport._BaseDelete._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGet: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets/{backend_bucket}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetRegionBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionBackendBucketsRestTransport._BaseGet._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets/{resource}/getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetIamPolicyRegionBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + 
_BaseRegionBackendBucketsRestTransport._BaseGetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseInsert: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets", + "body": "backend_bucket_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.InsertRegionBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionBackendBucketsRestTransport._BaseInsert._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = 
[ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.ListRegionBackendBucketsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionBackendBucketsRestTransport._BaseList._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseListUsable: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets/listUsable", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.ListUsableRegionBackendBucketsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionBackendBucketsRestTransport._BaseListUsable._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BasePatch: + def __hash__(self): # pragma: NO COVER + return 
NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets/{backend_bucket}", + "body": "backend_bucket_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.PatchRegionBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionBackendBucketsRestTransport._BasePatch._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets/{resource}/setIamPolicy", + "body": "region_set_policy_request_resource", + }, + ] 
+ return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.SetIamPolicyRegionBackendBucketRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionBackendBucketsRestTransport._BaseSetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/backendBuckets/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.TestIamPermissionsRegionBackendBucketRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + 
def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionBackendBucketsRestTransport._BaseTestIamPermissions._get_unset_required_fields( + query_params + ) + ) + + return query_params + + +__all__ = ("_BaseRegionBackendBucketsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/client.py index e6515b2ad883..29c80d5fe3eb 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/client.py @@ -1297,6 +1297,146 @@ def sample_get(): # Done; return the response. return response + def get_health( + self, + request: Optional[ + Union[compute.GetHealthRegionCompositeHealthCheckRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + composite_health_check: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.CompositeHealthCheckHealth: + r"""Gets the most recent health check results for this + regional CompositeHealthCheck. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_health(): + # Create a client + client = compute_v1.RegionCompositeHealthChecksClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthRegionCompositeHealthCheckRequest( + composite_health_check="composite_health_check_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetHealthRegionCompositeHealthCheckRequest, dict]): + The request object. A request message for + RegionCompositeHealthChecks.GetHealth. + See the method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + composite_health_check (str): + Name of the CompositeHealthCheck + resource to get health for. + + This corresponds to the ``composite_health_check`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.compute_v1.types.CompositeHealthCheckHealth: + Response message for + RegionCompositeHealthChecks.GetHealth + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, composite_health_check] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetHealthRegionCompositeHealthCheckRequest): + request = compute.GetHealthRegionCompositeHealthCheckRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if composite_health_check is not None: + request.composite_health_check = composite_health_check + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_health] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("composite_health_check", request.composite_health_check), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def insert_unary( self, request: Optional[ diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/base.py index 1a16402c9d59..a050e107df1f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/base.py @@ -182,6 +182,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_health: gapic_v1.method.wrap_method( + self.get_health, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), self.insert: gapic_v1.method.wrap_method( self.insert, default_timeout=600.0, @@ -253,6 +268,18 @@ def get( ]: raise NotImplementedError() + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionCompositeHealthCheckRequest], + Union[ + compute.CompositeHealthCheckHealth, + Awaitable[compute.CompositeHealthCheckHealth], + ], + ]: + raise NotImplementedError() + @property def insert( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/rest.py index 08591735c4b4..f6460f8d6c5f 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/rest.py @@ -96,6 +96,14 @@ 
def post_get(self, response): logging.log(f"Received response: {response}") return response + def pre_get_health(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_health(self, response): + logging.log(f"Received response: {response}") + return response + def pre_insert(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -282,6 +290,57 @@ def post_get_with_metadata( """ return response, metadata + def pre_get_health( + self, + request: compute.GetHealthRegionCompositeHealthCheckRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetHealthRegionCompositeHealthCheckRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_health + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionCompositeHealthChecks server. + """ + return request, metadata + + def post_get_health( + self, response: compute.CompositeHealthCheckHealth + ) -> compute.CompositeHealthCheckHealth: + """Post-rpc interceptor for get_health + + DEPRECATED. Please use the `post_get_health_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionCompositeHealthChecks server but before + it is returned to user code. This `post_get_health` interceptor runs + before the `post_get_health_with_metadata` interceptor. + """ + return response + + def post_get_health_with_metadata( + self, + response: compute.CompositeHealthCheckHealth, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.CompositeHealthCheckHealth, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_health + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionCompositeHealthChecks server but before it is returned to user code. 
+ + We recommend only using this `post_get_health_with_metadata` + interceptor in new development instead of the `post_get_health` interceptor. + When both interceptors are used, this `post_get_health_with_metadata` interceptor runs after the + `post_get_health` interceptor. The (possibly modified) response returned by + `post_get_health` will be passed to + `post_get_health_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionCompositeHealthCheckRequest, @@ -1053,6 +1112,157 @@ def __call__( ) return resp + class _GetHealth( + _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth, + RegionCompositeHealthChecksRestStub, + ): + def __hash__(self): + return hash("RegionCompositeHealthChecksRestTransport.GetHealth") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetHealthRegionCompositeHealthCheckRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.CompositeHealthCheckHealth: + r"""Call the get health method over HTTP. + + Args: + request (~.compute.GetHealthRegionCompositeHealthCheckRequest): + The request object. A request message for + RegionCompositeHealthChecks.GetHealth. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.CompositeHealthCheckHealth: + Response message for + RegionCompositeHealthChecks.GetHealth + + """ + + http_options = _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth._get_http_options() + + request, metadata = self._interceptor.pre_get_health(request, metadata) + transcoded_request = _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionCompositeHealthChecksClient.GetHealth", + extra={ + "serviceName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "rpcName": "GetHealth", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + RegionCompositeHealthChecksRestTransport._GetHealth._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.CompositeHealthCheckHealth() + pb_resp = compute.CompositeHealthCheckHealth.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_health(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_health_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.CompositeHealthCheckHealth.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionCompositeHealthChecksClient.get_health", + extra={ + "serviceName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "rpcName": "GetHealth", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _Insert( _BaseRegionCompositeHealthChecksRestTransport._BaseInsert, RegionCompositeHealthChecksRestStub, @@ -1738,6 +1948,17 @@ def get( # In C++ this would require a dynamic_cast return self._Get(self._session, self._host, self._interceptor) # type: ignore + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionCompositeHealthCheckRequest], + compute.CompositeHealthCheckHealth, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetHealth(self._session, self._host, self._interceptor) # type: ignore + @property def insert( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/rest_base.py index 519af299be6f..1478eacb8b69 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/rest_base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_composite_health_checks/transports/rest_base.py @@ -229,6 +229,52 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseGetHealth: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/compositeHealthChecks/{composite_health_check}/getHealth", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetHealthRegionCompositeHealthCheckRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionCompositeHealthChecksRestTransport._BaseGetHealth._get_unset_required_fields( + query_params + ) + 
) + + return query_params + class _BaseInsert: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py index a988ec77a72c..b046f7a587f9 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/client.py @@ -5242,6 +5242,339 @@ def error_code(self): # Done; return the response. return response + def update_kms_key_unary( + self, + request: Optional[Union[compute.UpdateKmsKeyRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disk_update_kms_key_request_resource: Optional[ + compute.RegionDiskUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_kms_key(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeyRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateKmsKeyRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the Disk resource, should + conform to RFC1035. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disk_update_kms_key_request_resource (google.cloud.compute_v1.types.RegionDiskUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``region_disk_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + disk, + region_disk_update_kms_key_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyRegionDiskRequest): + request = compute.UpdateKmsKeyRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disk_update_kms_key_request_resource is not None: + request.region_disk_update_kms_key_request_resource = ( + region_disk_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + ) + ), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_kms_key( + self, + request: Optional[Union[compute.UpdateKmsKeyRegionDiskRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + disk: Optional[str] = None, + region_disk_update_kms_key_request_resource: Optional[ + compute.RegionDiskUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + persistent disk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_kms_key(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeyRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateKmsKeyRegionDiskRequest, dict]): + The request object. A request message for + RegionDisks.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + disk (str): + Name of the Disk resource, should + conform to RFC1035. + + This corresponds to the ``disk`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_disk_update_kms_key_request_resource (google.cloud.compute_v1.types.RegionDiskUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``region_disk_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + disk, + region_disk_update_kms_key_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyRegionDiskRequest): + request = compute.UpdateKmsKeyRegionDiskRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if disk is not None: + request.disk = disk + if region_disk_update_kms_key_request_resource is not None: + request.region_disk_update_kms_key_request_resource = ( + region_disk_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("disk", request.disk), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + def __enter__(self) -> "RegionDisksClient": return self diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/base.py index 887b3a353102..356bd58964db 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/base.py @@ -262,6 +262,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_kms_key: gapic_v1.method.wrap_method( + self.update_kms_key, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -427,6 +432,15 @@ def update( ]: raise NotImplementedError() + @property + def update_kms_key( + self, + ) -> Callable[ + [compute.UpdateKmsKeyRegionDiskRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py index 834c763bbbf4..68275967c12c 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py +++ 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest.py @@ -208,6 +208,14 @@ def post_update(self, response): logging.log(f"Received response: {response}") return response + def pre_update_kms_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_kms_key(self, response): + logging.log(f"Received response: {response}") + return response + transport = RegionDisksRestTransport(interceptor=MyCustomRegionDisksInterceptor()) client = RegionDisksClient(transport=transport) @@ -1010,6 +1018,52 @@ def post_update_with_metadata( """ return response, metadata + def pre_update_kms_key( + self, + request: compute.UpdateKmsKeyRegionDiskRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateKmsKeyRegionDiskRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_kms_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionDisks server. + """ + return request, metadata + + def post_update_kms_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_kms_key + + DEPRECATED. Please use the `post_update_kms_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionDisks server but before + it is returned to user code. This `post_update_kms_key` interceptor runs + before the `post_update_kms_key_with_metadata` interceptor. 
+ """ + return response + + def post_update_kms_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_kms_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionDisks server but before it is returned to user code. + + We recommend only using this `post_update_kms_key_with_metadata` + interceptor in new development instead of the `post_update_kms_key` interceptor. + When both interceptors are used, this `post_update_kms_key_with_metadata` interceptor runs after the + `post_update_kms_key` interceptor. The (possibly modified) response returned by + `post_update_kms_key` will be passed to + `post_update_kms_key_with_metadata`. + """ + return response, metadata + @dataclasses.dataclass class RegionDisksRestStub: @@ -4180,6 +4234,192 @@ def __call__( ) return resp + class _UpdateKmsKey( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey, RegionDisksRestStub + ): + def __hash__(self): + return hash("RegionDisksRestTransport.UpdateKmsKey") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateKmsKeyRegionDiskRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update kms key method over HTTP. 
+ + Args: + request (~.compute.UpdateKmsKeyRegionDiskRequest): + The request object. A request message for + RegionDisks.UpdateKmsKey. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_kms_key(request, metadata) + transcoded_request = ( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_transcoded_request( + http_options, request + ) + ) + + body = ( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionDisksClient.UpdateKmsKey", + extra={ + "serviceName": "google.cloud.compute.v1.RegionDisks", + "rpcName": "UpdateKmsKey", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionDisksRestTransport._UpdateKmsKey._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_kms_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_kms_key_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionDisksClient.update_kms_key", + extra={ + "serviceName": "google.cloud.compute.v1.RegionDisks", + "rpcName": "UpdateKmsKey", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def add_resource_policies( self, @@ -4312,6 +4552,14 @@ def update(self) -> Callable[[compute.UpdateRegionDiskRequest], compute.Operatio # In C++ this would require a dynamic_cast return self._Update(self._session, self._host, self._interceptor) # type: ignore + @property + def update_kms_key( + self, + ) -> Callable[[compute.UpdateKmsKeyRegionDiskRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateKmsKey(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest_base.py index 7b19c46f7a21..a7e4b17ebc16 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest_base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_disks/transports/rest_base.py @@ -989,5 +989,61 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseUpdateKmsKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/disks/{disk}/updateKmsKey", + "body": "region_disk_update_kms_key_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateKmsKeyRegionDiskRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionDisksRestTransport._BaseUpdateKmsKey._get_unset_required_fields( + query_params + ) + ) + + return query_params + __all__ = ("_BaseRegionDisksRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/client.py index 5ba685079b02..5a31be6cc033 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/client.py @@ -1280,6 +1280,146 @@ def sample_get(): # Done; return the response. return response + def get_health( + self, + request: Optional[ + Union[compute.GetHealthRegionHealthSourceRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + health_source: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.HealthSourceHealth: + r"""Gets the most recent health check results for this + regional HealthSource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_health(): + # Create a client + client = compute_v1.RegionHealthSourcesClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthRegionHealthSourceRequest( + health_source="health_source_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetHealthRegionHealthSourceRequest, dict]): + The request object. A request message for + RegionHealthSources.GetHealth. See the + method description for details. + project (str): + Name of the project scoping this + request. + + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region scoping this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + health_source (str): + Name of the HealthSource resource to + get health for. + + This corresponds to the ``health_source`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.compute_v1.types.HealthSourceHealth: + Response message for + RegionHealthSources.GetHealth + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, health_source] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetHealthRegionHealthSourceRequest): + request = compute.GetHealthRegionHealthSourceRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if health_source is not None: + request.health_source = health_source + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_health] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("health_source", request.health_source), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def insert_unary( self, request: Optional[Union[compute.InsertRegionHealthSourceRequest, dict]] = None, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/base.py index 1d02c6c22218..db2b61929336 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/base.py @@ -182,6 +182,21 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.get_health: gapic_v1.method.wrap_method( + self.get_health, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), self.insert: gapic_v1.method.wrap_method( self.insert, default_timeout=600.0, @@ -253,6 +268,15 @@ def get( ]: raise NotImplementedError() + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionHealthSourceRequest], + Union[compute.HealthSourceHealth, Awaitable[compute.HealthSourceHealth]], + ]: + raise NotImplementedError() + @property def insert( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/rest.py index 3e6d8d833cc7..d7987dcbee98 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/rest.py @@ -96,6 +96,14 @@ def post_get(self, response): 
logging.log(f"Received response: {response}") return response + def pre_get_health(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_health(self, response): + logging.log(f"Received response: {response}") + return response + def pre_insert(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -277,6 +285,55 @@ def post_get_with_metadata( """ return response, metadata + def pre_get_health( + self, + request: compute.GetHealthRegionHealthSourceRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetHealthRegionHealthSourceRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_health + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionHealthSources server. + """ + return request, metadata + + def post_get_health( + self, response: compute.HealthSourceHealth + ) -> compute.HealthSourceHealth: + """Post-rpc interceptor for get_health + + DEPRECATED. Please use the `post_get_health_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionHealthSources server but before + it is returned to user code. This `post_get_health` interceptor runs + before the `post_get_health_with_metadata` interceptor. + """ + return response + + def post_get_health_with_metadata( + self, + response: compute.HealthSourceHealth, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.HealthSourceHealth, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_health + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionHealthSources server but before it is returned to user code. 
+ + We recommend only using this `post_get_health_with_metadata` + interceptor in new development instead of the `post_get_health` interceptor. + When both interceptors are used, this `post_get_health_with_metadata` interceptor runs after the + `post_get_health` interceptor. The (possibly modified) response returned by + `post_get_health` will be passed to + `post_get_health_with_metadata`. + """ + return response, metadata + def pre_insert( self, request: compute.InsertRegionHealthSourceRequest, @@ -1042,6 +1099,155 @@ def __call__( ) return resp + class _GetHealth( + _BaseRegionHealthSourcesRestTransport._BaseGetHealth, + RegionHealthSourcesRestStub, + ): + def __hash__(self): + return hash("RegionHealthSourcesRestTransport.GetHealth") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetHealthRegionHealthSourceRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.HealthSourceHealth: + r"""Call the get health method over HTTP. + + Args: + request (~.compute.GetHealthRegionHealthSourceRequest): + The request object. A request message for + RegionHealthSources.GetHealth. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.HealthSourceHealth: + Response message for + RegionHealthSources.GetHealth + + """ + + http_options = ( + _BaseRegionHealthSourcesRestTransport._BaseGetHealth._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_health(request, metadata) + transcoded_request = _BaseRegionHealthSourcesRestTransport._BaseGetHealth._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionHealthSourcesRestTransport._BaseGetHealth._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionHealthSourcesClient.GetHealth", + extra={ + "serviceName": "google.cloud.compute.v1.RegionHealthSources", + "rpcName": "GetHealth", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionHealthSourcesRestTransport._GetHealth._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.HealthSourceHealth() + pb_resp = compute.HealthSourceHealth.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_health(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_health_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.HealthSourceHealth.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionHealthSourcesClient.get_health", + extra={ + "serviceName": "google.cloud.compute.v1.RegionHealthSources", + "rpcName": "GetHealth", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _Insert( _BaseRegionHealthSourcesRestTransport._BaseInsert, RegionHealthSourcesRestStub ): @@ -1736,6 +1942,16 @@ def get( # In C++ this would require a dynamic_cast return self._Get(self._session, self._host, self._interceptor) # type: ignore + @property + def get_health( + self, + ) -> Callable[ + [compute.GetHealthRegionHealthSourceRequest], compute.HealthSourceHealth + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetHealth(self._session, self._host, self._interceptor) # type: ignore + @property def insert( self, diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/rest_base.py index 3ae058526150..da212acaaa81 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/rest_base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_health_sources/transports/rest_base.py @@ -225,6 +225,52 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseGetHealth: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/healthSources/{health_source}/getHealth", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetHealthRegionHealthSourceRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionHealthSourcesRestTransport._BaseGetHealth._get_unset_required_fields( + query_params + ) + ) + + return query_params + class _BaseInsert: def __hash__(self): # 
pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/__init__.py new file mode 100644 index 000000000000..b79d9c6b0cb1 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionInstanceGroupManagerResizeRequestsClient + +__all__ = ("RegionInstanceGroupManagerResizeRequestsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/client.py new file mode 100644 index 000000000000..c69aeb64b949 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/client.py @@ -0,0 +1,2119 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import functools +import json +import logging as std_logging +import os +import re +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.api_core.extended_operation as extended_operation # type: ignore + +from google.cloud.compute_v1.services.region_instance_group_manager_resize_requests 
import ( + pagers, +) +from google.cloud.compute_v1.types import compute + +from .transports.base import ( + DEFAULT_CLIENT_INFO, + RegionInstanceGroupManagerResizeRequestsTransport, +) +from .transports.rest import RegionInstanceGroupManagerResizeRequestsRestTransport + + +class RegionInstanceGroupManagerResizeRequestsClientMeta(type): + """Metaclass for the RegionInstanceGroupManagerResizeRequests client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstanceGroupManagerResizeRequestsTransport]] + _transport_registry["rest"] = RegionInstanceGroupManagerResizeRequestsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RegionInstanceGroupManagerResizeRequestsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionInstanceGroupManagerResizeRequestsClient( + metaclass=RegionInstanceGroupManagerResizeRequestsClientMeta +): + """The RegionInstanceGroupManagerResizeRequests API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. + return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstanceGroupManagerResizeRequestsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstanceGroupManagerResizeRequestsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionInstanceGroupManagerResizeRequestsTransport: + """Returns the transport used by the client instance. 
+ + Returns: + RegionInstanceGroupManagerResizeRequestsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location 
string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + ) + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + ) + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ( + RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE + ) + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ( + RegionInstanceGroupManagerResizeRequestsClient.DEFAULT_MTLS_ENDPOINT + ) + else: + api_endpoint = RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ( + RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE + ) + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + RegionInstanceGroupManagerResizeRequestsTransport, + Callable[..., RegionInstanceGroupManagerResizeRequestsTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region instance group manager resize requests client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,RegionInstanceGroupManagerResizeRequestsTransport,Callable[..., RegionInstanceGroupManagerResizeRequestsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionInstanceGroupManagerResizeRequestsTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. 
Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + ) + self._client_cert_source = ( + RegionInstanceGroupManagerResizeRequestsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ( + RegionInstanceGroupManagerResizeRequestsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. 
+ # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance( + transport, RegionInstanceGroupManagerResizeRequestsTransport + ) + if transport_provided: + # transport is a RegionInstanceGroupManagerResizeRequestsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." + ) + self._transport = cast( + RegionInstanceGroupManagerResizeRequestsTransport, transport + ) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RegionInstanceGroupManagerResizeRequestsTransport], + Callable[..., RegionInstanceGroupManagerResizeRequestsTransport], + ] = ( + RegionInstanceGroupManagerResizeRequestsClient.get_transport_class( + transport + ) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., RegionInstanceGroupManagerResizeRequestsTransport], + transport, + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + 
quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient`.", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "credentialsType": None, + }, + ) + + def cancel_unary( + self, + request: Optional[ + Union[compute.CancelRegionInstanceGroupManagerResizeRequestRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + resize_request: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Cancels the specified resize request. + Cancelled resize request no longer waits for the + resources to be provisioned. Cancel is only possible for + requests that are in accepted state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_cancel(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.CancelRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + resize_request="resize_request_value", + ) + + # Make the request + response = client.cancel(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CancelRegionInstanceGroupManagerResizeRequestRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Cancel. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region + scoping this request. Name should + conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. Name should conform to RFC1035 or + be a resource ID. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resize_request (str): + The name of the resize request to + cancel. Name should conform to RFC1035 + or be a resource ID. + + This corresponds to the ``resize_request`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instance_group_manager, resize_request] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.CancelRegionInstanceGroupManagerResizeRequestRequest + ): + request = compute.CancelRegionInstanceGroupManagerResizeRequestRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if resize_request is not None: + request.resize_request = resize_request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.cancel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + ("resize_request", request.resize_request), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def cancel( + self, + request: Optional[ + Union[compute.CancelRegionInstanceGroupManagerResizeRequestRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + resize_request: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Cancels the specified resize request. + Cancelled resize request no longer waits for the + resources to be provisioned. Cancel is only possible for + requests that are in accepted state. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_cancel(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.CancelRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + resize_request="resize_request_value", + ) + + # Make the request + response = client.cancel(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.CancelRegionInstanceGroupManagerResizeRequestRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Cancel. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region + scoping this request. Name should + conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. Name should conform to RFC1035 or + be a resource ID. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resize_request (str): + The name of the resize request to + cancel. Name should conform to RFC1035 + or be a resource ID. + + This corresponds to the ``resize_request`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instance_group_manager, resize_request] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.CancelRegionInstanceGroupManagerResizeRequestRequest + ): + request = compute.CancelRegionInstanceGroupManagerResizeRequestRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if resize_request is not None: + request.resize_request = resize_request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.cancel] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + ("resize_request", request.resize_request), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def delete_unary( + self, + request: Optional[ + Union[compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + resize_request: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Deletes the specified, inactive resize request. + Requests that are still active cannot be deleted. + Deleting request does not delete instances that were + provisioned previously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + resize_request="resize_request_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerResizeRequestRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Delete. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region + scoping this request. Name should + conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. Name should conform to RFC1035 or + be a resource ID. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resize_request (str): + The name of the resize request to + delete. Name should conform to RFC1035 + or be a resource ID. + + This corresponds to the ``resize_request`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instance_group_manager, resize_request] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.DeleteRegionInstanceGroupManagerResizeRequestRequest + ): + request = compute.DeleteRegionInstanceGroupManagerResizeRequestRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if resize_request is not None: + request.resize_request = resize_request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + ("resize_request", request.resize_request), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete( + self, + request: Optional[ + Union[compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + resize_request: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified, inactive resize request. + Requests that are still active cannot be deleted. + Deleting request does not delete instances that were + provisioned previously. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + resize_request="resize_request_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerResizeRequestRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Delete. + See the method description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region + scoping this request. Name should + conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. Name should conform to RFC1035 or + be a resource ID. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resize_request (str): + The name of the resize request to + delete. Name should conform to RFC1035 + or be a resource ID. + + This corresponds to the ``resize_request`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instance_group_manager, resize_request] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.DeleteRegionInstanceGroupManagerResizeRequestRequest + ): + request = compute.DeleteRegionInstanceGroupManagerResizeRequestRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instance_group_manager is not None: + request.instance_group_manager = instance_group_manager + if resize_request is not None: + request.resize_request = resize_request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("instance_group_manager", request.instance_group_manager), + ("resize_request", request.resize_request), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. 
+ # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get( + self, + request: Optional[ + Union[compute.GetRegionInstanceGroupManagerResizeRequestRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instance_group_manager: Optional[str] = None, + resize_request: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.InstanceGroupManagerResizeRequest: + r"""Returns all of the details about the specified resize + request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + resize_request="resize_request_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionInstanceGroupManagerResizeRequestRequest, dict]): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Get. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region + scoping this request. Name should + conform to RFC1035. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instance_group_manager (str): + The name of the managed instance + group. Name should conform to RFC1035 or + be a resource ID. + + This corresponds to the ``instance_group_manager`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resize_request (str): + The name of the resize request. + Name should conform to RFC1035 or be a + resource ID. + + This corresponds to the ``resize_request`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest: + InstanceGroupManagerResizeRequest + represents a request to create a number + of VMs: either immediately or by queuing + the request for the specified time. This + resize request is nested under + InstanceGroupManager and the VMs created + by this request are added to the owning + InstanceGroupManager. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instance_group_manager, resize_request] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.GetRegionInstanceGroupManagerResizeRequestRequest + ): + request = compute.GetRegionInstanceGroupManagerResizeRequestRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
    def insert_unary(
        self,
        request: Optional[
            Union[compute.InsertRegionInstanceGroupManagerResizeRequestRequest, dict]
        ] = None,
        *,
        project: Optional[str] = None,
        region: Optional[str] = None,
        instance_group_manager: Optional[str] = None,
        instance_group_manager_resize_request_resource: Optional[
            compute.InstanceGroupManagerResizeRequest
        ] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> compute.Operation:
        r"""Creates a new Resize Request that starts provisioning
        VMs immediately or queues VM creation.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient()

                # Initialize request argument(s)
                request = compute_v1.InsertRegionInstanceGroupManagerResizeRequestRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerResizeRequestRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagerResizeRequests.Insert.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region
                scoping this request. Name should
                conform to RFC1035.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                Name of the managed instance group to
                which the resize request is scoped. Name
                should conform to RFC1035 or be a
                resource ID.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager_resize_request_resource (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest):
                The body resource for this request
                This corresponds to the ``instance_group_manager_resize_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.compute_v1.types.Operation:
                The raw long-running operation proto for
                this insert. Unlike :meth:`insert`, this
                variant returns the operation as-is: it
                is not wrapped in an
                ``ExtendedOperation`` and completion is
                not polled for, so callers must track
                the operation themselves if they need to
                wait for it to finish.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        flattened_params = [
            project,
            region,
            instance_group_manager,
            instance_group_manager_resize_request_resource,
        ]
        has_flattened_params = (
            len([param for param in flattened_params if param is not None]) > 0
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(
            request, compute.InsertRegionInstanceGroupManagerResizeRequestRequest
        ):
            request = compute.InsertRegionInstanceGroupManagerResizeRequestRequest(
                request
            )
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if instance_group_manager_resize_request_resource is not None:
            request.instance_group_manager_resize_request_resource = (
                instance_group_manager_resize_request_resource
            )

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here. (The body resource is intentionally not part of
        # the routing header — only the URL path parameters are.)
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (
                    ("project", request.project),
                    ("region", request.region),
                    ("instance_group_manager", request.instance_group_manager),
                )
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def insert(
        self,
        request: Optional[
            Union[compute.InsertRegionInstanceGroupManagerResizeRequestRequest, dict]
        ] = None,
        *,
        project: Optional[str] = None,
        region: Optional[str] = None,
        instance_group_manager: Optional[str] = None,
        instance_group_manager_resize_request_resource: Optional[
            compute.InstanceGroupManagerResizeRequest
        ] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> extended_operation.ExtendedOperation:
        r"""Creates a new Resize Request that starts provisioning
        VMs immediately or queues VM creation.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_insert():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient()

                # Initialize request argument(s)
                request = compute_v1.InsertRegionInstanceGroupManagerResizeRequestRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                response = client.insert(request=request)

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerResizeRequestRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagerResizeRequests.Insert.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region
                scoping this request. Name should
                conform to RFC1035.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                Name of the managed instance group to
                which the resize request is scoped. Name
                should conform to RFC1035 or be a
                resource ID.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager_resize_request_resource (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest):
                The body resource for this request
                This corresponds to the ``instance_group_manager_resize_request_resource`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.api_core.extended_operation.ExtendedOperation:
                An object representing an extended
                long-running operation.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        flattened_params = [
            project,
            region,
            instance_group_manager,
            instance_group_manager_resize_request_resource,
        ]
        has_flattened_params = (
            len([param for param in flattened_params if param is not None]) > 0
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(
            request, compute.InsertRegionInstanceGroupManagerResizeRequestRequest
        ):
            request = compute.InsertRegionInstanceGroupManagerResizeRequestRequest(
                request
            )
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager
        if instance_group_manager_resize_request_resource is not None:
            request.instance_group_manager_resize_request_resource = (
                instance_group_manager_resize_request_resource
            )

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.insert]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (
                    ("project", request.project),
                    ("region", request.region),
                    ("instance_group_manager", request.instance_group_manager),
                )
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # The returned compute.Operation is polled via the regional
        # operations service; build the poll request from the same
        # project/region the insert was routed to.
        operation_service = self._transport._region_operations_client
        operation_request = compute.GetRegionOperationRequest()
        operation_request.project = request.project
        operation_request.region = request.region
        operation_request.operation = response.name

        get_operation = functools.partial(operation_service.get, operation_request)
        # Cancel is not part of extended operations yet.
        cancel_operation = lambda: None

        # Note: this class is an implementation detail to provide a uniform
        # set of names for certain fields in the extended operation proto message.
        # See google.api_core.extended_operation.ExtendedOperation for details
        # on these properties and the expected interface.
        class _CustomOperation(extended_operation.ExtendedOperation):
            @property
            def error_message(self):
                # Compute uses http_error_message where the extended
                # operation interface expects error_message.
                return self._extended_operation.http_error_message

            @property
            def error_code(self):
                # Likewise, map http_error_status_code -> error_code.
                return self._extended_operation.http_error_status_code

        response = _CustomOperation.make(get_operation, cancel_operation, response)

        # Done; return the response.
        return response
    def list(
        self,
        request: Optional[
            Union[compute.ListRegionInstanceGroupManagerResizeRequestsRequest, dict]
        ] = None,
        *,
        project: Optional[str] = None,
        region: Optional[str] = None,
        instance_group_manager: Optional[str] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ) -> pagers.ListPager:
        r"""Retrieves a list of Resize Requests that are
        contained in the managed instance group.

        .. code-block:: python

            # This snippet has been automatically generated and should be regarded as a
            # code template only.
            # It will require modifications to work:
            # - It may require correct/in-range values for request initialization.
            # - It may require specifying regional endpoints when creating the service
            #   client as shown in:
            #   https://googleapis.dev/python/google-api-core/latest/client_options.html
            from google.cloud import compute_v1

            def sample_list():
                # Create a client
                client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient()

                # Initialize request argument(s)
                request = compute_v1.ListRegionInstanceGroupManagerResizeRequestsRequest(
                    instance_group_manager="instance_group_manager_value",
                    project="project_value",
                    region="region_value",
                )

                # Make the request
                page_result = client.list(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.compute_v1.types.ListRegionInstanceGroupManagerResizeRequestsRequest, dict]):
                The request object. A request message for
                RegionInstanceGroupManagerResizeRequests.List.
                See the method description for details.
            project (str):
                Project ID for this request.
                This corresponds to the ``project`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            region (str):
                Name of the region
                scoping this request. Name should
                conform to RFC1035.

                This corresponds to the ``region`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            instance_group_manager (str):
                The name of the managed instance
                group. The name should conform to
                RFC1035.

                This corresponds to the ``instance_group_manager`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.

        Returns:
            google.cloud.compute_v1.services.region_instance_group_manager_resize_requests.pagers.ListPager:
                Iterating over this object will yield
                results and resolve additional pages
                automatically.

        """
        # Create or coerce a protobuf request object.
        # - Quick check: If we got a request object, we should *not* have
        #   gotten any keyword arguments that map to the request.
        flattened_params = [project, region, instance_group_manager]
        has_flattened_params = (
            len([param for param in flattened_params if param is not None]) > 0
        )
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        # - Use the request object if provided (there's no risk of modifying the input as
        #   there are no flattened fields), or create one.
        if not isinstance(
            request, compute.ListRegionInstanceGroupManagerResizeRequestsRequest
        ):
            request = compute.ListRegionInstanceGroupManagerResizeRequestsRequest(
                request
            )
        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if project is not None:
            request.project = project
        if region is not None:
            request.region = region
        if instance_group_manager is not None:
            request.instance_group_manager = instance_group_manager

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = self._transport._wrapped_methods[self._transport.list]

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata(
                (
                    ("project", request.project),
                    ("region", request.region),
                    ("instance_group_manager", request.instance_group_manager),
                )
            ),
        )

        # Validate the universe domain.
        self._validate_universe_domain()

        # Send the request.
        response = rpc(
            request,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # This method is paged; wrap the response in a pager, which provides
        # an `__iter__` convenience method. Subsequent pages are fetched
        # lazily with the same retry/timeout/metadata as this first call.
        response = pagers.ListPager(
            method=rpc,
            request=request,
            response=response,
            retry=retry,
            timeout=timeout,
            metadata=metadata,
        )

        # Done; return the response.
        return response
    def __enter__(self) -> "RegionInstanceGroupManagerResizeRequestsClient":
        """Enter the runtime context and return this client unchanged.

        Pairs with :meth:`__exit__`, which closes the underlying
        transport when the ``with`` block is left.
        """
        return self

    def __exit__(self, type, value, traceback):
        """Releases underlying transport's resources.

        .. warning::
            ONLY use as a context manager if the transport is NOT shared
            with other clients! Exiting the with block will CLOSE the transport
            and may cause errors in other clients!
        """
        self.transport.close()
class ListPager:
    """Pager over the results of ``list`` calls.

    Thinly wraps an initial
    :class:`google.cloud.compute_v1.types.RegionInstanceGroupManagerResizeRequestsListResponse`
    and exposes ``__iter__`` over its ``items`` field. Whenever the
    current response carries a ``next_page_token``, iteration issues a
    further ``List`` request transparently and keeps yielding items.

    Attribute access is proxied to the most recently fetched response
    object, so the usual response attributes remain available on the
    pager itself.
    """

    def __init__(
        self,
        method: Callable[
            ..., compute.RegionInstanceGroupManagerResizeRequestsListResponse
        ],
        request: compute.ListRegionInstanceGroupManagerResizeRequestsRequest,
        response: compute.RegionInstanceGroupManagerResizeRequestsListResponse,
        *,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: Union[float, object] = gapic_v1.method.DEFAULT,
        metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
    ):
        """Instantiate the pager.

        Args:
            method (Callable): The method that was originally called, and
                which instantiated this pager.
            request (google.cloud.compute_v1.types.ListRegionInstanceGroupManagerResizeRequestsRequest):
                The initial request object.
            response (google.cloud.compute_v1.types.RegionInstanceGroupManagerResizeRequestsListResponse):
                The initial response object.
            retry (google.api_core.retry.Retry): Designation of what errors,
                if any, should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                sent along with the request as metadata. Normally, each value must be of type `str`,
                but for metadata keys ending with the suffix `-bin`, the corresponding values must
                be of type `bytes`.
        """
        # Re-wrap the request so that mutating page_token below never
        # touches the caller's object.
        self._request = compute.ListRegionInstanceGroupManagerResizeRequestsRequest(
            request
        )
        self._method = method
        self._response = response
        self._retry = retry
        self._timeout = timeout
        self._metadata = metadata

    def __getattr__(self, name: str) -> Any:
        # Delegate unknown attribute lookups to the latest response.
        return getattr(self._response, name)

    @property
    def pages(
        self,
    ) -> Iterator[compute.RegionInstanceGroupManagerResizeRequestsListResponse]:
        page = self._response
        yield page
        while page.next_page_token:
            self._request.page_token = page.next_page_token
            page = self._method(
                self._request,
                retry=self._retry,
                timeout=self._timeout,
                metadata=self._metadata,
            )
            # Keep the proxy target current for __getattr__.
            self._response = page
            yield page

    def __iter__(self) -> Iterator[compute.InstanceGroupManagerResizeRequest]:
        return (item for page in self.pages for item in page.items)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}<{self._response!r}>"
# Compile a registry of transports.
# Maps the transport "kind" string to its concrete transport class; this
# module registers only the REST transport.
_transport_registry = OrderedDict()  # type: Dict[str, Type[RegionInstanceGroupManagerResizeRequestsTransport]]
_transport_registry["rest"] = RegionInstanceGroupManagerResizeRequestsRestTransport

# Public surface of the transports sub-package.
__all__ = (
    "RegionInstanceGroupManagerResizeRequestsTransport",
    "RegionInstanceGroupManagerResizeRequestsRestTransport",
    "RegionInstanceGroupManagerResizeRequestsRestInterceptor",
)
class RegionInstanceGroupManagerResizeRequestsTransport(abc.ABC):
    """Abstract transport class for RegionInstanceGroupManagerResizeRequests."""

    AUTH_SCOPES = (
        "https://www.googleapis.com/auth/compute",
        "https://www.googleapis.com/auth/cloud-platform",
    )

    DEFAULT_HOST: str = "compute.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to (default: 'compute.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): Deprecated. A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials. This argument will be
                removed in the next major version of this library.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The intended audience for the API calls
                to the service that will be set when using certain 3rd party
                authentication flows. Audience is typically a resource identifier.
                If not set, the host value will be used as a default.
        """
        # Lazily-built cache of extended-operation helper clients
        # (see _region_operations_client below).
        self._extended_operations_services: Dict[str, Any] = {}

        # Save the scopes.
        self._scopes = scopes
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                scopes=scopes,
                quota_project_id=quota_project_id,
                default_scopes=self.AUTH_SCOPES,
            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(
                scopes=scopes,
                quota_project_id=quota_project_id,
                default_scopes=self.AUTH_SCOPES,
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

        # Populated by _prep_wrapped_messages in concrete transports.
        self._wrapped_methods: Dict[Callable, Callable] = {}

    @property
    def host(self):
        """The host (including port) this transport connects to."""
        return self._host

    def _prep_wrapped_messages(self, client_info):
        """Wrap each RPC with its default retry/timeout policy.

        ``get`` and ``list`` (the read-only methods) retry on
        DeadlineExceeded/ServiceUnavailable; the mutating methods only get
        a default timeout.
        """
        # Precompute the wrapped methods.
        self._wrapped_methods = {
            self.cancel: gapic_v1.method.wrap_method(
                self.cancel,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.delete: gapic_v1.method.wrap_method(
                self.delete,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.get: gapic_v1.method.wrap_method(
                self.get,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=600.0,
                ),
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.insert: gapic_v1.method.wrap_method(
                self.insert,
                default_timeout=600.0,
                client_info=client_info,
            ),
            self.list: gapic_v1.method.wrap_method(
                self.list,
                default_retry=retries.Retry(
                    initial=0.1,
                    maximum=60.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.DeadlineExceeded,
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=600.0,
                ),
                default_timeout=600.0,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
            Only call this method if the transport is NOT shared
            with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def cancel(
        self,
    ) -> Callable[
        [compute.CancelRegionInstanceGroupManagerResizeRequestRequest],
        Union[compute.Operation, Awaitable[compute.Operation]],
    ]:
        """Abstract handle for the Cancel RPC; concrete transports override."""
        raise NotImplementedError()

    @property
    def delete(
        self,
    ) -> Callable[
        [compute.DeleteRegionInstanceGroupManagerResizeRequestRequest],
        Union[compute.Operation, Awaitable[compute.Operation]],
    ]:
        """Abstract handle for the Delete RPC; concrete transports override."""
        raise NotImplementedError()

    @property
    def get(
        self,
    ) -> Callable[
        [compute.GetRegionInstanceGroupManagerResizeRequestRequest],
        Union[
            compute.InstanceGroupManagerResizeRequest,
            Awaitable[compute.InstanceGroupManagerResizeRequest],
        ],
    ]:
        """Abstract handle for the Get RPC; concrete transports override."""
        raise NotImplementedError()

    @property
    def insert(
        self,
    ) -> Callable[
        [compute.InsertRegionInstanceGroupManagerResizeRequestRequest],
        Union[compute.Operation, Awaitable[compute.Operation]],
    ]:
        """Abstract handle for the Insert RPC; concrete transports override."""
        raise NotImplementedError()

    @property
    def list(
        self,
    ) -> Callable[
        [compute.ListRegionInstanceGroupManagerResizeRequestsRequest],
        Union[
            compute.RegionInstanceGroupManagerResizeRequestsListResponse,
            Awaitable[compute.RegionInstanceGroupManagerResizeRequestsListResponse],
        ],
    ]:
        """Abstract handle for the List RPC; concrete transports override."""
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        """Transport kind string (e.g. "rest"); concrete transports override."""
        raise NotImplementedError()

    @property
    def _region_operations_client(self) -> region_operations.RegionOperationsClient:
        """Client used to poll regional extended operations.

        Built lazily on first access with this transport's credentials and
        kind, then cached in self._extended_operations_services.
        """
        ex_op_service = self._extended_operations_services.get("region_operations")
        if not ex_op_service:
            ex_op_service = region_operations.RegionOperationsClient(
                credentials=self._credentials,
                transport=self.kind,
            )
            self._extended_operations_services["region_operations"] = ex_op_service

        return ex_op_service
b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/transports/rest.py new file mode 100644 index 000000000000..33eaf5e0a805 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/transports/rest.py @@ -0,0 +1,1358 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseRegionInstanceGroupManagerResizeRequestsRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core 
import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionInstanceGroupManagerResizeRequestsRestInterceptor: + """Interceptor for RegionInstanceGroupManagerResizeRequests. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionInstanceGroupManagerResizeRequestsRestTransport. + + .. 
code-block:: python + class MyCustomRegionInstanceGroupManagerResizeRequestsInterceptor(RegionInstanceGroupManagerResizeRequestsRestInterceptor): + def pre_cancel(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_cancel(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionInstanceGroupManagerResizeRequestsRestTransport(interceptor=MyCustomRegionInstanceGroupManagerResizeRequestsInterceptor()) + client = RegionInstanceGroupManagerResizeRequestsClient(transport=transport) + + + """ + + def pre_cancel( + self, + request: compute.CancelRegionInstanceGroupManagerResizeRequestRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.CancelRegionInstanceGroupManagerResizeRequestRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for cancel + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagerResizeRequests server. 
+ """ + return request, metadata + + def post_cancel(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for cancel + + DEPRECATED. Please use the `post_cancel_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstanceGroupManagerResizeRequests server but before + it is returned to user code. This `post_cancel` interceptor runs + before the `post_cancel_with_metadata` interceptor. + """ + return response + + def post_cancel_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for cancel + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_cancel_with_metadata` + interceptor in new development instead of the `post_cancel` interceptor. + When both interceptors are used, this `post_cancel_with_metadata` interceptor runs after the + `post_cancel` interceptor. The (possibly modified) response returned by + `post_cancel` will be passed to + `post_cancel_with_metadata`. + """ + return response, metadata + + def pre_delete( + self, + request: compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagerResizeRequests server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + DEPRECATED. 
Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstanceGroupManagerResizeRequests server but before + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. + """ + return response + + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + + def pre_get( + self, + request: compute.GetRegionInstanceGroupManagerResizeRequestRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetRegionInstanceGroupManagerResizeRequestRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagerResizeRequests server. + """ + return request, metadata + + def post_get( + self, response: compute.InstanceGroupManagerResizeRequest + ) -> compute.InstanceGroupManagerResizeRequest: + """Post-rpc interceptor for get + + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstanceGroupManagerResizeRequests server but before + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. + """ + return response + + def post_get_with_metadata( + self, + response: compute.InstanceGroupManagerResizeRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InstanceGroupManagerResizeRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + + def pre_insert( + self, + request: compute.InsertRegionInstanceGroupManagerResizeRequestRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InsertRegionInstanceGroupManagerResizeRequestRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagerResizeRequests server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstanceGroupManagerResizeRequests server but before + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. + """ + return response + + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. + """ + return response, metadata + + def pre_list( + self, + request: compute.ListRegionInstanceGroupManagerResizeRequestsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListRegionInstanceGroupManagerResizeRequestsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstanceGroupManagerResizeRequests server. + """ + return request, metadata + + def post_list( + self, response: compute.RegionInstanceGroupManagerResizeRequestsListResponse + ) -> compute.RegionInstanceGroupManagerResizeRequestsListResponse: + """Post-rpc interceptor for list + + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstanceGroupManagerResizeRequests server but before + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. + """ + return response + + def post_list_with_metadata( + self, + response: compute.RegionInstanceGroupManagerResizeRequestsListResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.RegionInstanceGroupManagerResizeRequestsListResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstanceGroupManagerResizeRequests server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. + """ + return response, metadata + + +@dataclasses.dataclass +class RegionInstanceGroupManagerResizeRequestsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionInstanceGroupManagerResizeRequestsRestInterceptor + + +class RegionInstanceGroupManagerResizeRequestsRestTransport( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport +): + """REST backend synchronous transport for RegionInstanceGroupManagerResizeRequests. + + The RegionInstanceGroupManagerResizeRequests API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ + RegionInstanceGroupManagerResizeRequestsRestInterceptor + ] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[RegionInstanceGroupManagerResizeRequestsRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = ( + interceptor or RegionInstanceGroupManagerResizeRequestsRestInterceptor() + ) + self._prep_wrapped_messages(client_info) + + class _Cancel( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseCancel, + RegionInstanceGroupManagerResizeRequestsRestStub, + ): + def __hash__(self): + return hash("RegionInstanceGroupManagerResizeRequestsRestTransport.Cancel") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.CancelRegionInstanceGroupManagerResizeRequestRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the cancel method over HTTP. + + Args: + request (~.compute.CancelRegionInstanceGroupManagerResizeRequestRequest): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Cancel. + See the method description for details. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseCancel._get_http_options() + + request, metadata = self._interceptor.pre_cancel(request, metadata) + transcoded_request = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseCancel._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseCancel._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.Cancel", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "Cancel", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstanceGroupManagerResizeRequestsRestTransport._Cancel._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_cancel(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_cancel_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.cancel", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "Cancel", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Delete( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseDelete, + RegionInstanceGroupManagerResizeRequestsRestStub, + ): + def __hash__(self): + return hash("RegionInstanceGroupManagerResizeRequestsRestTransport.Delete") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, + *, + retry: OptionalRetry 
= gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionInstanceGroupManagerResizeRequestRequest): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Delete. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseDelete._get_http_options() + + request, metadata = self._interceptor.pre_delete(request, metadata) + transcoded_request = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseDelete._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseDelete._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.Delete", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "Delete", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstanceGroupManagerResizeRequestsRestTransport._Delete._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.delete", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "Delete", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Get( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseGet, + RegionInstanceGroupManagerResizeRequestsRestStub, + ): + def __hash__(self): + return hash("RegionInstanceGroupManagerResizeRequestsRestTransport.Get") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetRegionInstanceGroupManagerResizeRequestRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.InstanceGroupManagerResizeRequest: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionInstanceGroupManagerResizeRequestRequest): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Get. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.InstanceGroupManagerResizeRequest: + InstanceGroupManagerResizeRequest + represents a request to create a number + of VMs: either immediately or by queuing + the request for the specified time. This + resize request is nested under + InstanceGroupManager and the VMs created + by this request are added to the owning + InstanceGroupManager. 
+ + """ + + http_options = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseGet._get_http_options() + + request, metadata = self._interceptor.pre_get(request, metadata) + transcoded_request = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseGet._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseGet._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.Get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "Get", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstanceGroupManagerResizeRequestsRestTransport._Get._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstanceGroupManagerResizeRequest() + pb_resp = compute.InstanceGroupManagerResizeRequest.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + compute.InstanceGroupManagerResizeRequest.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "Get", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Insert( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseInsert, + RegionInstanceGroupManagerResizeRequestsRestStub, + ): + def __hash__(self): + return hash("RegionInstanceGroupManagerResizeRequestsRestTransport.Insert") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
compute.InsertRegionInstanceGroupManagerResizeRequestRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionInstanceGroupManagerResizeRequestRequest): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.Insert. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseInsert._get_http_options() + + request, metadata = self._interceptor.pre_insert(request, metadata) + transcoded_request = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseInsert._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseInsert._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseInsert._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.Insert", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "Insert", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstanceGroupManagerResizeRequestsRestTransport._Insert._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.insert", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "Insert", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _List( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseList, + RegionInstanceGroupManagerResizeRequestsRestStub, + ): + def __hash__(self): + return hash("RegionInstanceGroupManagerResizeRequestsRestTransport.List") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.ListRegionInstanceGroupManagerResizeRequestsRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.RegionInstanceGroupManagerResizeRequestsListResponse: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionInstanceGroupManagerResizeRequestsRequest): + The request object. A request message for + RegionInstanceGroupManagerResizeRequests.List. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.RegionInstanceGroupManagerResizeRequestsListResponse: + + """ + + http_options = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseList._get_http_options() + + request, metadata = self._interceptor.pre_list(request, metadata) + transcoded_request = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.List", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "List", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstanceGroupManagerResizeRequestsRestTransport._List._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.RegionInstanceGroupManagerResizeRequestsListResponse() + pb_resp = compute.RegionInstanceGroupManagerResizeRequestsListResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.RegionInstanceGroupManagerResizeRequestsListResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.list", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "rpcName": "List", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def cancel( + self, + ) -> Callable[ + [compute.CancelRegionInstanceGroupManagerResizeRequestRequest], + compute.Operation, + ]: + # The return type 
is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Cancel(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteRegionInstanceGroupManagerResizeRequestRequest], + compute.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionInstanceGroupManagerResizeRequestRequest], + compute.InstanceGroupManagerResizeRequest, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertRegionInstanceGroupManagerResizeRequestRequest], + compute.Operation, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionInstanceGroupManagerResizeRequestsRequest], + compute.RegionInstanceGroupManagerResizeRequestsListResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RegionInstanceGroupManagerResizeRequestsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/transports/rest_base.py new file mode 100644 index 000000000000..4e92c7067c87 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instance_group_manager_resize_requests/transports/rest_base.py @@ -0,0 +1,343 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.protobuf import json_format + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO, RegionInstanceGroupManagerResizeRequestsTransport + + +class _BaseRegionInstanceGroupManagerResizeRequestsRestTransport( + RegionInstanceGroupManagerResizeRequestsTransport +): + """Base REST backend transport for RegionInstanceGroupManagerResizeRequests. 
+ + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCancel: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}/cancel", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + compute.CancelRegionInstanceGroupManagerResizeRequestRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseCancel._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseDelete: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + 
@classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + compute.DeleteRegionInstanceGroupManagerResizeRequestRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseDelete._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGet: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetRegionInstanceGroupManagerResizeRequestRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return 
transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseGet._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseInsert: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests", + "body": "instance_group_manager_resize_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = ( + compute.InsertRegionInstanceGroupManagerResizeRequestRequest.pb(request) + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseInsert._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseList: + def __hash__(self): # pragma: NO COVER + 
return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.ListRegionInstanceGroupManagerResizeRequestsRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstanceGroupManagerResizeRequestsRestTransport._BaseList._get_unset_required_fields( + query_params + ) + ) + + return query_params + + +__all__ = ("_BaseRegionInstanceGroupManagerResizeRequestsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/__init__.py new file mode 100644 index 000000000000..f43dd1ce6003 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionInstantSnapshotGroupsClient + +__all__ = ("RegionInstantSnapshotGroupsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/client.py new file mode 100644 index 000000000000..9104c9a75f19 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/client.py @@ -0,0 +1,2127 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import functools +import json +import logging as std_logging +import os +import re +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.api_core.extended_operation as extended_operation # type: ignore + +from google.cloud.compute_v1.services.region_instant_snapshot_groups import pagers +from google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, RegionInstantSnapshotGroupsTransport +from .transports.rest import RegionInstantSnapshotGroupsRestTransport + + +class RegionInstantSnapshotGroupsClientMeta(type): + """Metaclass for the RegionInstantSnapshotGroups client. + + This provides class-level methods for building and retrieving + support objects (e.g. 
transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionInstantSnapshotGroupsTransport]] + _transport_registry["rest"] = RegionInstantSnapshotGroupsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RegionInstantSnapshotGroupsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionInstantSnapshotGroupsClient( + metaclass=RegionInstantSnapshotGroupsClientMeta +): + """The RegionInstantSnapshotGroups API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. + return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. 
Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstantSnapshotGroupsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionInstantSnapshotGroupsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionInstantSnapshotGroupsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionInstantSnapshotGroupsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = RegionInstantSnapshotGroupsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = RegionInstantSnapshotGroupsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = RegionInstantSnapshotGroupsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = RegionInstantSnapshotGroupsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + RegionInstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = RegionInstantSnapshotGroupsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + RegionInstantSnapshotGroupsTransport, + Callable[..., RegionInstantSnapshotGroupsTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region instant snapshot groups client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,RegionInstantSnapshotGroupsTransport,Callable[..., RegionInstantSnapshotGroupsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionInstantSnapshotGroupsTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + RegionInstantSnapshotGroupsClient._read_environment_variables() + ) + self._client_cert_source = ( + RegionInstantSnapshotGroupsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = RegionInstantSnapshotGroupsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ transport_provided = isinstance(transport, RegionInstantSnapshotGroupsTransport) + if transport_provided: + # transport is a RegionInstantSnapshotGroupsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." + ) + self._transport = cast(RegionInstantSnapshotGroupsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or RegionInstantSnapshotGroupsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RegionInstantSnapshotGroupsTransport], + Callable[..., RegionInstantSnapshotGroupsTransport], + ] = ( + RegionInstantSnapshotGroupsClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast( + Callable[..., RegionInstantSnapshotGroupsTransport], transport + ) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + 
std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.compute_v1.RegionInstantSnapshotGroupsClient`.", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "credentialsType": None, + }, + ) + + def delete_unary( + self, + request: Optional[ + Union[compute.DeleteRegionInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instant_snapshot_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""deletes a Regional InstantSnapshotGroup resource + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionInstantSnapshotGroupRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instant_snapshot_group (str): + Name of the InstantSnapshotGroup + resource to delete. + + This corresponds to the ``instant_snapshot_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instant_snapshot_group] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteRegionInstantSnapshotGroupRequest): + request = compute.DeleteRegionInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instant_snapshot_group is not None: + request.instant_snapshot_group = instant_snapshot_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("instant_snapshot_group", request.instant_snapshot_group), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete( + self, + request: Optional[ + Union[compute.DeleteRegionInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instant_snapshot_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""deletes a Regional InstantSnapshotGroup resource + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionInstantSnapshotGroupRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.Delete. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instant_snapshot_group (str): + Name of the InstantSnapshotGroup + resource to delete. + + This corresponds to the ``instant_snapshot_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instant_snapshot_group] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteRegionInstantSnapshotGroupRequest): + request = compute.DeleteRegionInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instant_snapshot_group is not None: + request.instant_snapshot_group = instant_snapshot_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("instant_snapshot_group", request.instant_snapshot_group), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get( + self, + request: Optional[ + Union[compute.GetRegionInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instant_snapshot_group: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.InstantSnapshotGroup: + r"""returns the specified InstantSnapshotGroup resource + in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionInstantSnapshotGroupRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instant_snapshot_group (str): + Name of the InstantSnapshotGroup + resource to return. + + This corresponds to the ``instant_snapshot_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.InstantSnapshotGroup: + Represents an InstantSnapshotGroup + resource. + An instant snapshot group is a set of + instant snapshots that represents a + point in time state of a consistency + group. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instant_snapshot_group] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetRegionInstantSnapshotGroupRequest): + request = compute.GetRegionInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instant_snapshot_group is not None: + request.instant_snapshot_group = instant_snapshot_group + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("instant_snapshot_group", request.instant_snapshot_group), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_iam_policy( + self, + request: Optional[ + Union[compute.GetIamPolicyRegionInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyRegionInstantSnapshotGroupRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.GetIamPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). + + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: 
BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.GetIamPolicyRegionInstantSnapshotGroupRequest + ): + request = compute.GetIamPolicyRegionInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert_unary( + self, + request: Optional[ + Union[compute.InsertRegionInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instant_snapshot_group_resource: Optional[compute.InstantSnapshotGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""creates a Regional InstantSnapshotGroup resource + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionInstantSnapshotGroupRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.Insert. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ instant_snapshot_group_resource (google.cloud.compute_v1.types.InstantSnapshotGroup): + The body resource for this request + This corresponds to the ``instant_snapshot_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instant_snapshot_group_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertRegionInstantSnapshotGroupRequest): + request = compute.InsertRegionInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if instant_snapshot_group_resource is not None: + request.instant_snapshot_group_resource = ( + instant_snapshot_group_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert( + self, + request: Optional[ + Union[compute.InsertRegionInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + instant_snapshot_group_resource: Optional[compute.InstantSnapshotGroup] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""creates a Regional InstantSnapshotGroup resource + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionInstantSnapshotGroupRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.Insert. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + instant_snapshot_group_resource (google.cloud.compute_v1.types.InstantSnapshotGroup): + The body resource for this request + This corresponds to the ``instant_snapshot_group_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, instant_snapshot_group_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertRegionInstantSnapshotGroupRequest): + request = compute.InsertRegionInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if instant_snapshot_group_resource is not None: + request.instant_snapshot_group_resource = ( + instant_snapshot_group_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def list( + self, + request: Optional[ + Union[compute.ListRegionInstantSnapshotGroupsRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPager: + r"""retrieves the list of InstantSnapshotGroup resources + contained within the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstantSnapshotGroupsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionInstantSnapshotGroupsRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.List. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.services.region_instant_snapshot_groups.pagers.ListPager: + Contains a list of + InstantSnapshotGroup resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListRegionInstantSnapshotGroupsRequest): + request = compute.ListRegionInstantSnapshotGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def set_iam_policy( + self, + request: Optional[ + Union[compute.SetIamPolicyRegionInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[ + compute.RegionSetPolicyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyRegionInstantSnapshotGroupRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.SetIamPolicy. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. 
+ + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + resource, + region_set_policy_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, compute.SetIamPolicyRegionInstantSnapshotGroupRequest + ): + request = compute.SetIamPolicyRegionInstantSnapshotGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = ( + region_set_policy_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[ + Union[compute.TestIamPermissionsRegionInstantSnapshotGroupRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[ + compute.TestPermissionsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsRegionInstantSnapshotGroupRequest, dict]): + The request object. A request message for + RegionInstantSnapshotGroups.TestIamPermissions. + See the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + resource, + test_permissions_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, compute.TestIamPermissionsRegionInstantSnapshotGroupRequest + ): + request = compute.TestIamPermissionsRegionInstantSnapshotGroupRequest( + request + ) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = ( + test_permissions_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionInstantSnapshotGroupsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("RegionInstantSnapshotGroupsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/pagers.py new file mode 100644 index 000000000000..9f7d751981c3 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/pagers.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.ListInstantSnapshotGroups` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.ListInstantSnapshotGroups` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., compute.ListInstantSnapshotGroups], + request: compute.ListRegionInstantSnapshotGroupsRequest, + response: compute.ListInstantSnapshotGroups, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionInstantSnapshotGroupsRequest): + The initial request object. + response (google.cloud.compute_v1.types.ListInstantSnapshotGroups): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = compute.ListRegionInstantSnapshotGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.ListInstantSnapshotGroups]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[compute.InstantSnapshotGroup]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/README.rst b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/README.rst new file mode 100644 index 000000000000..c91ce3084e01 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/README.rst @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + +``RegionInstantSnapshotGroupsTransport`` is the ABC for all transports. + +- public child ``RegionInstantSnapshotGroupsGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). +- public child ``RegionInstantSnapshotGroupsGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseRegionInstantSnapshotGroupsRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). 
+- public child ``RegionInstantSnapshotGroupsRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/__init__.py new file mode 100644 index 000000000000..2d7b729ea00a --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionInstantSnapshotGroupsTransport +from .rest import ( + RegionInstantSnapshotGroupsRestInterceptor, + RegionInstantSnapshotGroupsRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = OrderedDict()  # type: Dict[str, Type[RegionInstantSnapshotGroupsTransport]]
+_transport_registry["rest"] = RegionInstantSnapshotGroupsRestTransport  # REST is the only transport registered for this service
+
+__all__ = (  # public surface of this transports subpackage
+    "RegionInstantSnapshotGroupsTransport",
+    "RegionInstantSnapshotGroupsRestTransport",
+    "RegionInstantSnapshotGroupsRestInterceptor",
+)
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/base.py
new file mode 100644
index 000000000000..34a85f3879c1
--- /dev/null
+++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/base.py
@@ -0,0 +1,311 @@
+# -*- coding: utf-8 -*-
+# Copyright 2025 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +import google.auth # type: ignore +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version +from google.cloud.compute_v1.services import region_operations +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionInstantSnapshotGroupsTransport(abc.ABC): + """Abstract transport class for RegionInstantSnapshotGroups.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "compute.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + self._extended_operations_services: Dict[str, Any] = {} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + self._wrapped_methods: Dict[Callable, Callable] = {} + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=600.0, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=600.0, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=600.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteRegionInstantSnapshotGroupRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionInstantSnapshotGroupRequest], + Union[compute.InstantSnapshotGroup, Awaitable[compute.InstantSnapshotGroup]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [compute.GetIamPolicyRegionInstantSnapshotGroupRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertRegionInstantSnapshotGroupRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionInstantSnapshotGroupsRequest], + Union[ + compute.ListInstantSnapshotGroups, + Awaitable[compute.ListInstantSnapshotGroups], + ], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [compute.SetIamPolicyRegionInstantSnapshotGroupRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsRegionInstantSnapshotGroupRequest], + Union[ + compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse] + ], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + 
self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ("RegionInstantSnapshotGroupsTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/rest.py new file mode 100644 index 000000000000..ebf0b9ce6b0f --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/rest.py @@ -0,0 +1,1901 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseRegionInstantSnapshotGroupsRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionInstantSnapshotGroupsRestInterceptor: + """Interceptor for RegionInstantSnapshotGroups. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionInstantSnapshotGroupsRestTransport. + + .. code-block:: python + class MyCustomRegionInstantSnapshotGroupsInterceptor(RegionInstantSnapshotGroupsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received 
response: {response}") + return response + + transport = RegionInstantSnapshotGroupsRestTransport(interceptor=MyCustomRegionInstantSnapshotGroupsInterceptor()) + client = RegionInstantSnapshotGroupsClient(transport=transport) + + + """ + + def pre_delete( + self, + request: compute.DeleteRegionInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteRegionInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstantSnapshotGroups server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstantSnapshotGroups server but before + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. + """ + return response + + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. 
+ """ + return response, metadata + + def pre_get( + self, + request: compute.GetRegionInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetRegionInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstantSnapshotGroups server. + """ + return request, metadata + + def post_get( + self, response: compute.InstantSnapshotGroup + ) -> compute.InstantSnapshotGroup: + """Post-rpc interceptor for get + + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstantSnapshotGroups server but before + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. + """ + return response + + def post_get_with_metadata( + self, + response: compute.InstantSnapshotGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.InstantSnapshotGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + + def pre_get_iam_policy( + self, + request: compute.GetIamPolicyRegionInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetIamPolicyRegionInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstantSnapshotGroups server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstantSnapshotGroups server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_insert( + self, + request: compute.InsertRegionInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InsertRegionInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstantSnapshotGroups server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstantSnapshotGroups server but before + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. + """ + return response + + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + + def pre_list( + self, + request: compute.ListRegionInstantSnapshotGroupsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListRegionInstantSnapshotGroupsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstantSnapshotGroups server. + """ + return request, metadata + + def post_list( + self, response: compute.ListInstantSnapshotGroups + ) -> compute.ListInstantSnapshotGroups: + """Post-rpc interceptor for list + + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstantSnapshotGroups server but before + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. + """ + return response + + def post_list_with_metadata( + self, + response: compute.ListInstantSnapshotGroups, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListInstantSnapshotGroups, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + + def pre_set_iam_policy( + self, + request: compute.SetIamPolicyRegionInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SetIamPolicyRegionInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstantSnapshotGroups server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstantSnapshotGroups server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_test_iam_permissions( + self, + request: compute.TestIamPermissionsRegionInstantSnapshotGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestIamPermissionsRegionInstantSnapshotGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionInstantSnapshotGroups server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: compute.TestPermissionsResponse + ) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionInstantSnapshotGroups server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionInstantSnapshotGroups server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. 
The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. + """ + return response, metadata + + +@dataclasses.dataclass +class RegionInstantSnapshotGroupsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionInstantSnapshotGroupsRestInterceptor + + +class RegionInstantSnapshotGroupsRestTransport( + _BaseRegionInstantSnapshotGroupsRestTransport +): + """REST backend synchronous transport for RegionInstantSnapshotGroups. + + The RegionInstantSnapshotGroups API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RegionInstantSnapshotGroupsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[RegionInstantSnapshotGroupsRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionInstantSnapshotGroupsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseDelete, + RegionInstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("RegionInstantSnapshotGroupsRestTransport.Delete") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.DeleteRegionInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionInstantSnapshotGroupRequest): + The request object. A request message for + RegionInstantSnapshotGroups.Delete. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ ~.compute.Operation:
+ Represents an Operation resource.
+
+ Google Compute Engine has three Operation resources:
+
+ - `Global `__
+ - `Regional `__
+ - `Zonal `__
+
+ You can use an operation resource to manage asynchronous
+ API requests. For more information, read Handling API
+ responses.
+
+ Operations can be global, regional or zonal.
+
+ ::
+
+ - For global operations, use the `globalOperations`
+ resource.
+ - For regional operations, use the
+ `regionOperations` resource.
+ - For zonal operations, use
+ the `zoneOperations` resource.
+
+ For more information, read Global, Regional, and Zonal
+ Resources.
+
+ Note that completed Operation resources have a limited
+ retention period.
+
+ """
+
+ http_options = _BaseRegionInstantSnapshotGroupsRestTransport._BaseDelete._get_http_options()
+
+ request, metadata = self._interceptor.pre_delete(request, metadata)
+ transcoded_request = _BaseRegionInstantSnapshotGroupsRestTransport._BaseDelete._get_transcoded_request(
+ http_options, request
+ )
+
+ # Jsonify the query params
+ query_params = _BaseRegionInstantSnapshotGroupsRestTransport._BaseDelete._get_query_params_json(
+ transcoded_request
+ )
+
+ if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
+ logging.DEBUG
+ ): # pragma: NO COVER
+ request_url = "{host}{uri}".format(
+ host=self._host, uri=transcoded_request["uri"]
+ )
+ method = transcoded_request["method"]
+ try:
+ request_payload = type(request).to_json(request)
+ except:
+ request_payload = None
+ http_request = {
+ "payload": request_payload,
+ "requestMethod": method,
+ "requestUrl": request_url,
+ "headers": dict(metadata),
+ }
+ _LOGGER.debug(
+ f"Sending request for
google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.Delete", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "Delete", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstantSnapshotGroupsRestTransport._Delete._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.delete", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "Delete", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Get( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseGet, + RegionInstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("RegionInstantSnapshotGroupsRestTransport.Get") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = 
transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetRegionInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.InstantSnapshotGroup: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionInstantSnapshotGroupRequest): + The request object. A request message for + RegionInstantSnapshotGroups.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.InstantSnapshotGroup: + Represents an InstantSnapshotGroup + resource. + An instant snapshot group is a set of + instant snapshots that represents a + point in time state of a consistency + group. 
+ + """ + + http_options = _BaseRegionInstantSnapshotGroupsRestTransport._BaseGet._get_http_options() + + request, metadata = self._interceptor.pre_get(request, metadata) + transcoded_request = _BaseRegionInstantSnapshotGroupsRestTransport._BaseGet._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionInstantSnapshotGroupsRestTransport._BaseGet._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.Get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "Get", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstantSnapshotGroupsRestTransport._Get._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.InstantSnapshotGroup() + pb_resp = compute.InstantSnapshotGroup.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.InstantSnapshotGroup.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "Get", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetIamPolicy( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseGetIamPolicy, + RegionInstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("RegionInstantSnapshotGroupsRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetIamPolicyRegionInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyRegionInstantSnapshotGroupRequest): + The request object. A request message for + RegionInstantSnapshotGroups.GetIamPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options = _BaseRegionInstantSnapshotGroupsRestTransport._BaseGetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseRegionInstantSnapshotGroupsRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionInstantSnapshotGroupsRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + RegionInstantSnapshotGroupsRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Policy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.get_iam_policy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Insert( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseInsert, + RegionInstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("RegionInstantSnapshotGroupsRestTransport.Insert") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.InsertRegionInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None,
+ metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
+ ) -> compute.Operation:
+ r"""Call the insert method over HTTP.
+
+ Args:
+ request (~.compute.InsertRegionInstantSnapshotGroupRequest):
+ The request object. A request message for
+ RegionInstantSnapshotGroups.Insert. See
+ the method description for details.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
+ sent along with the request as metadata. Normally, each value must be of type `str`,
+ but for metadata keys ending with the suffix `-bin`, the corresponding values must
+ be of type `bytes`.
+
+ Returns:
+ ~.compute.Operation:
+ Represents an Operation resource.
+
+ Google Compute Engine has three Operation resources:
+
+ - `Global `__
+ - `Regional `__
+ - `Zonal `__
+
+ You can use an operation resource to manage asynchronous
+ API requests. For more information, read Handling API
+ responses.
+
+ Operations can be global, regional or zonal.
+
+ ::
+
+ - For global operations, use the `globalOperations`
+ resource.
+ - For regional operations, use the
+ `regionOperations` resource.
+ - For zonal operations, use
+ the `zoneOperations` resource.
+
+ For more information, read Global, Regional, and Zonal
+ Resources.
+
+ Note that completed Operation resources have a limited
+ retention period.
+ + """ + + http_options = _BaseRegionInstantSnapshotGroupsRestTransport._BaseInsert._get_http_options() + + request, metadata = self._interceptor.pre_insert(request, metadata) + transcoded_request = _BaseRegionInstantSnapshotGroupsRestTransport._BaseInsert._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionInstantSnapshotGroupsRestTransport._BaseInsert._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionInstantSnapshotGroupsRestTransport._BaseInsert._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.Insert", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "Insert", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstantSnapshotGroupsRestTransport._Insert._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.insert", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "Insert", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _List( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseList, + RegionInstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("RegionInstantSnapshotGroupsRestTransport.List") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.ListRegionInstantSnapshotGroupsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.ListInstantSnapshotGroups: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListRegionInstantSnapshotGroupsRequest): + The request object. A request message for + RegionInstantSnapshotGroups.List. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.ListInstantSnapshotGroups: + Contains a list of + InstantSnapshotGroup resources. + + """ + + http_options = _BaseRegionInstantSnapshotGroupsRestTransport._BaseList._get_http_options() + + request, metadata = self._interceptor.pre_list(request, metadata) + transcoded_request = _BaseRegionInstantSnapshotGroupsRestTransport._BaseList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionInstantSnapshotGroupsRestTransport._BaseList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.List", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "List", + "httpRequest": 
http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstantSnapshotGroupsRestTransport._List._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.ListInstantSnapshotGroups() + pb_resp = compute.ListInstantSnapshotGroups.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.ListInstantSnapshotGroups.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.list", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "List", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseSetIamPolicy, + RegionInstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("RegionInstantSnapshotGroupsRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = 
"application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.SetIamPolicyRegionInstantSnapshotGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyRegionInstantSnapshotGroupRequest): + The request object. A request message for + RegionInstantSnapshotGroups.SetIamPolicy. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. 
To learn which resources support + conditions in their IAM policies, see the `IAM + documentation <https://cloud.google.com/iam/help/conditions/resource-policies>`__. + + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation <https://cloud.google.com/iam/docs/>`__. 
+ + """ + + http_options = _BaseRegionInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_http_options() + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseRegionInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + RegionInstantSnapshotGroupsRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Policy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.set_iam_policy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseTestIamPermissions, + RegionInstantSnapshotGroupsRestStub, + ): + def __hash__(self): + return hash("RegionInstantSnapshotGroupsRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.TestIamPermissionsRegionInstantSnapshotGroupRequest, + *, + retry: 
OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsRegionInstantSnapshotGroupRequest): + The request object. A request message for + RegionInstantSnapshotGroups.TestIamPermissions. + See the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options = _BaseRegionInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseRegionInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": 
request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.TestIamPermissions", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionInstantSnapshotGroupsRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.TestPermissionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.test_iam_permissions", + extra={ + "serviceName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionInstantSnapshotGroupRequest], compute.Operation]: 
+ # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionInstantSnapshotGroupRequest], compute.InstantSnapshotGroup + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[ + [compute.GetIamPolicyRegionInstantSnapshotGroupRequest], compute.Policy + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionInstantSnapshotGroupRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionInstantSnapshotGroupsRequest], + compute.ListInstantSnapshotGroups, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[ + [compute.SetIamPolicyRegionInstantSnapshotGroupRequest], compute.Policy + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsRegionInstantSnapshotGroupRequest], + compute.TestPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RegionInstantSnapshotGroupsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/rest_base.py new file mode 100644 index 000000000000..a1999430ad8f --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_instant_snapshot_groups/transports/rest_base.py @@ -0,0 +1,451 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.protobuf import json_format + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO, RegionInstantSnapshotGroupsTransport + + +class _BaseRegionInstantSnapshotGroupsRestTransport( + RegionInstantSnapshotGroupsTransport +): + """Base REST backend transport for RegionInstantSnapshotGroups. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. 
Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseDelete: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{instant_snapshot_group}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.DeleteRegionInstantSnapshotGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseDelete._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGet: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + 
+ @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{instant_snapshot_group}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetRegionInstantSnapshotGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseGet._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{resource}/getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetIamPolicyRegionInstantSnapshotGroupRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + 
json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseGetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseInsert: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups", + "body": "instant_snapshot_group_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.InsertRegionInstantSnapshotGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseInsert._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, 
v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.ListRegionInstantSnapshotGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseList._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{resource}/setIamPolicy", + "body": "region_set_policy_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.SetIamPolicyRegionInstantSnapshotGroupRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], 
use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseSetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.TestIamPermissionsRegionInstantSnapshotGroupRequest.pb( + request + ) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionInstantSnapshotGroupsRestTransport._BaseTestIamPermissions._get_unset_required_fields( + query_params + ) + ) + + return query_params + + +__all__ = 
("_BaseRegionInstantSnapshotGroupsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/__init__.py new file mode 100644 index 000000000000..cf371a6bf62e --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionSnapshotSettingsClient + +__all__ = ("RegionSnapshotSettingsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/client.py new file mode 100644 index 000000000000..a13f83b491f8 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/client.py @@ -0,0 +1,1143 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import functools +import json +import logging as std_logging +import os +import re +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.api_core.extended_operation as extended_operation # type: ignore + +from google.cloud.compute_v1.types import compute + +from .transports.base import 
DEFAULT_CLIENT_INFO, RegionSnapshotSettingsTransport +from .transports.rest import RegionSnapshotSettingsRestTransport + + +class RegionSnapshotSettingsClientMeta(type): + """Metaclass for the RegionSnapshotSettings client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionSnapshotSettingsTransport]] + _transport_registry["rest"] = RegionSnapshotSettingsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RegionSnapshotSettingsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionSnapshotSettingsClient(metaclass=RegionSnapshotSettingsClientMeta): + """The RegionSnapshotSettings API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. 
+ return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionSnapshotSettingsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionSnapshotSettingsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionSnapshotSettingsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionSnapshotSettingsTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + 
) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = RegionSnapshotSettingsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = RegionSnapshotSettingsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = RegionSnapshotSettingsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = RegionSnapshotSettingsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + RegionSnapshotSettingsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = RegionSnapshotSettingsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + RegionSnapshotSettingsTransport, + Callable[..., RegionSnapshotSettingsTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region snapshot settings client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,RegionSnapshotSettingsTransport,Callable[..., RegionSnapshotSettingsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionSnapshotSettingsTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + RegionSnapshotSettingsClient._read_environment_variables() + ) + self._client_cert_source = RegionSnapshotSettingsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = RegionSnapshotSettingsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ transport_provided = isinstance(transport, RegionSnapshotSettingsTransport) + if transport_provided: + # transport is a RegionSnapshotSettingsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." + ) + self._transport = cast(RegionSnapshotSettingsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or RegionSnapshotSettingsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RegionSnapshotSettingsTransport], + Callable[..., RegionSnapshotSettingsTransport], + ] = ( + RegionSnapshotSettingsClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionSnapshotSettingsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + 
_LOGGER.debug( + "Created client `google.cloud.compute_v1.RegionSnapshotSettingsClient`.", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshotSettings", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.compute.v1.RegionSnapshotSettings", + "credentialsType": None, + }, + ) + + def get( + self, + request: Optional[Union[compute.GetRegionSnapshotSettingRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.SnapshotSettings: + r"""Get region snapshot settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionSnapshotSettingsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSnapshotSettingRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionSnapshotSettingRequest, dict]): + The request object. 
A request message for + RegionSnapshotSettings.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.SnapshotSettings: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetRegionSnapshotSettingRequest): + request = compute.GetRegionSnapshotSettingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def patch_unary( + self, + request: Optional[ + Union[compute.PatchRegionSnapshotSettingRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot_settings_resource: Optional[compute.SnapshotSettings] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Patch region snapshot settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionSnapshotSettingsClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSnapshotSettingRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionSnapshotSettingRequest, dict]): + The request object. A request message for + RegionSnapshotSettings.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_settings_resource (google.cloud.compute_v1.types.SnapshotSettings): + The body resource for this request + This corresponds to the ``snapshot_settings_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, snapshot_settings_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.PatchRegionSnapshotSettingRequest): + request = compute.PatchRegionSnapshotSettingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot_settings_resource is not None: + request.snapshot_settings_resource = snapshot_settings_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def patch( + self, + request: Optional[ + Union[compute.PatchRegionSnapshotSettingRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot_settings_resource: Optional[compute.SnapshotSettings] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Patch region snapshot settings. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_patch(): + # Create a client + client = compute_v1.RegionSnapshotSettingsClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSnapshotSettingRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.PatchRegionSnapshotSettingRequest, dict]): + The request object. A request message for + RegionSnapshotSettings.Patch. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ snapshot_settings_resource (google.cloud.compute_v1.types.SnapshotSettings): + The body resource for this request + This corresponds to the ``snapshot_settings_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, snapshot_settings_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.PatchRegionSnapshotSettingRequest): + request = compute.PatchRegionSnapshotSettingRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot_settings_resource is not None: + request.snapshot_settings_resource = snapshot_settings_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.patch] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "RegionSnapshotSettingsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("RegionSnapshotSettingsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/README.rst b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/README.rst new file mode 100644 index 000000000000..05c3038f9214 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/README.rst @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + +``RegionSnapshotSettingsTransport`` is the ABC for all transports. + +- public child ``RegionSnapshotSettingsGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). +- public child ``RegionSnapshotSettingsGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseRegionSnapshotSettingsRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). +- public child ``RegionSnapshotSettingsRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). 
diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/__init__.py new file mode 100644 index 000000000000..90555f4f3793 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionSnapshotSettingsTransport +from .rest import ( + RegionSnapshotSettingsRestInterceptor, + RegionSnapshotSettingsRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionSnapshotSettingsTransport]] +_transport_registry["rest"] = RegionSnapshotSettingsRestTransport + +__all__ = ( + "RegionSnapshotSettingsTransport", + "RegionSnapshotSettingsRestTransport", + "RegionSnapshotSettingsRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/base.py new file mode 100644 index 000000000000..dcbac4ba74a4 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/base.py @@ -0,0 +1,216 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +import google.auth # type: ignore +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version +from google.cloud.compute_v1.services import region_operations +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionSnapshotSettingsTransport(abc.ABC): + """Abstract transport class for RegionSnapshotSettings.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "compute.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + self._extended_operations_services: Dict[str, Any] = {} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + self._wrapped_methods: Dict[Callable, Callable] = {} + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.get: gapic_v1.method.wrap_method( + self.get, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.patch: gapic_v1.method.wrap_method( + self.patch, + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionSnapshotSettingRequest], + Union[compute.SnapshotSettings, Awaitable[compute.SnapshotSettings]], + ]: + raise NotImplementedError() + + @property + def patch( + self, + ) -> Callable[ + [compute.PatchRegionSnapshotSettingRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ("RegionSnapshotSettingsTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/rest.py new file mode 100644 index 000000000000..9fcc1cd3ac01 --- /dev/null +++ 
b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/rest.py @@ -0,0 +1,638 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseRegionSnapshotSettingsRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = 
gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionSnapshotSettingsRestInterceptor: + """Interceptor for RegionSnapshotSettings. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionSnapshotSettingsRestTransport. + + .. code-block:: python + class MyCustomRegionSnapshotSettingsInterceptor(RegionSnapshotSettingsRestInterceptor): + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_patch(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_patch(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionSnapshotSettingsRestTransport(interceptor=MyCustomRegionSnapshotSettingsInterceptor()) + client = RegionSnapshotSettingsClient(transport=transport) + + + """ + + def pre_get( + self, + request: compute.GetRegionSnapshotSettingRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetRegionSnapshotSettingRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshotSettings server. 
+ """ + return request, metadata + + def post_get(self, response: compute.SnapshotSettings) -> compute.SnapshotSettings: + """Post-rpc interceptor for get + + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshotSettings server but before + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. + """ + return response + + def post_get_with_metadata( + self, + response: compute.SnapshotSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SnapshotSettings, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshotSettings server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + + def pre_patch( + self, + request: compute.PatchRegionSnapshotSettingRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.PatchRegionSnapshotSettingRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for patch + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshotSettings server. + """ + return request, metadata + + def post_patch(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for patch + + DEPRECATED. Please use the `post_patch_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshotSettings server but before + it is returned to user code. This `post_patch` interceptor runs + before the `post_patch_with_metadata` interceptor. + """ + return response + + def post_patch_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for patch + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshotSettings server but before it is returned to user code. + + We recommend only using this `post_patch_with_metadata` + interceptor in new development instead of the `post_patch` interceptor. + When both interceptors are used, this `post_patch_with_metadata` interceptor runs after the + `post_patch` interceptor. The (possibly modified) response returned by + `post_patch` will be passed to + `post_patch_with_metadata`. + """ + return response, metadata + + +@dataclasses.dataclass +class RegionSnapshotSettingsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionSnapshotSettingsRestInterceptor + + +class RegionSnapshotSettingsRestTransport(_BaseRegionSnapshotSettingsRestTransport): + """REST backend synchronous transport for RegionSnapshotSettings. + + The RegionSnapshotSettings API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RegionSnapshotSettingsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[RegionSnapshotSettingsRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionSnapshotSettingsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Get( + _BaseRegionSnapshotSettingsRestTransport._BaseGet, + RegionSnapshotSettingsRestStub, + ): + def __hash__(self): + return hash("RegionSnapshotSettingsRestTransport.Get") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetRegionSnapshotSettingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.SnapshotSettings: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionSnapshotSettingRequest): + The request object. A request message for + RegionSnapshotSettings.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.SnapshotSettings: + + """ + + http_options = ( + _BaseRegionSnapshotSettingsRestTransport._BaseGet._get_http_options() + ) + + request, metadata = self._interceptor.pre_get(request, metadata) + transcoded_request = _BaseRegionSnapshotSettingsRestTransport._BaseGet._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionSnapshotSettingsRestTransport._BaseGet._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotSettingsClient.Get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshotSettings", + "rpcName": "Get", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotSettingsRestTransport._Get._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SnapshotSettings() + pb_resp = compute.SnapshotSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.SnapshotSettings.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotSettingsClient.get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshotSettings", + "rpcName": "Get", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Patch( + _BaseRegionSnapshotSettingsRestTransport._BasePatch, + RegionSnapshotSettingsRestStub, + ): + def __hash__(self): + return hash("RegionSnapshotSettingsRestTransport.Patch") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.PatchRegionSnapshotSettingRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, 
Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the patch method over HTTP. + + Args: + request (~.compute.PatchRegionSnapshotSettingRequest): + The request object. A request message for + RegionSnapshotSettings.Patch. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseRegionSnapshotSettingsRestTransport._BasePatch._get_http_options() + ) + + request, metadata = self._interceptor.pre_patch(request, metadata) + transcoded_request = _BaseRegionSnapshotSettingsRestTransport._BasePatch._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionSnapshotSettingsRestTransport._BasePatch._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionSnapshotSettingsRestTransport._BasePatch._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotSettingsClient.Patch", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshotSettings", + "rpcName": "Patch", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotSettingsRestTransport._Patch._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_patch(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_patch_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotSettingsClient.patch", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshotSettings", + "rpcName": "Patch", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def get( + self, + ) -> Callable[[compute.GetRegionSnapshotSettingRequest], compute.SnapshotSettings]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def patch( + self, + ) -> Callable[[compute.PatchRegionSnapshotSettingRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Patch(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RegionSnapshotSettingsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/rest_base.py new file mode 100644 index 000000000000..e9842bd72298 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshot_settings/transports/rest_base.py @@ -0,0 +1,193 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.protobuf import json_format + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO, RegionSnapshotSettingsTransport + + +class _BaseRegionSnapshotSettingsRestTransport(RegionSnapshotSettingsTransport): + """Base REST backend transport for RegionSnapshotSettings. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. 
+ """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseGet: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshotSettings", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetRegionSnapshotSettingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotSettingsRestTransport._BaseGet._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BasePatch: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in 
cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshotSettings", + "body": "snapshot_settings_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.PatchRegionSnapshotSettingRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotSettingsRestTransport._BasePatch._get_unset_required_fields( + query_params + ) + ) + + return query_params + + +__all__ = ("_BaseRegionSnapshotSettingsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/__init__.py new file mode 100644 index 000000000000..9656289fcd9e --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import RegionSnapshotsClient + +__all__ = ("RegionSnapshotsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/client.py new file mode 100644 index 000000000000..b2cf79c9ff08 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/client.py @@ -0,0 +1,2767 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import functools +import json +import logging as std_logging +import os +import re +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.api_core.extended_operation as extended_operation # type: ignore + +from google.cloud.compute_v1.services.region_snapshots import pagers +from google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, RegionSnapshotsTransport +from .transports.rest import RegionSnapshotsRestTransport + + +class RegionSnapshotsClientMeta(type): + """Metaclass for the RegionSnapshots client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. 
+ """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[RegionSnapshotsTransport]] + _transport_registry["rest"] = RegionSnapshotsRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[RegionSnapshotsTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class RegionSnapshotsClient(metaclass=RegionSnapshotsClientMeta): + """The RegionSnapshots API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. + return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. 
+ DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) + """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionSnapshotsClient: The constructed client. 
+ """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + RegionSnapshotsClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> RegionSnapshotsTransport: + """Returns the transport used by the client instance. + + Returns: + RegionSnapshotsTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a 
fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = RegionSnapshotsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = RegionSnapshotsClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = RegionSnapshotsClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = RegionSnapshotsClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = RegionSnapshotsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
+ + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = RegionSnapshotsClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. 
+ + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, RegionSnapshotsTransport, Callable[..., RegionSnapshotsTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the region snapshots client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,RegionSnapshotsTransport,Callable[..., RegionSnapshotsTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the RegionSnapshotsTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. 
Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + RegionSnapshotsClient._read_environment_variables() + ) + self._client_cert_source = RegionSnapshotsClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = RegionSnapshotsClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, RegionSnapshotsTransport) + if transport_provided: + # transport is a RegionSnapshotsTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." 
+ ) + self._transport = cast(RegionSnapshotsTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or RegionSnapshotsClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[RegionSnapshotsTransport], Callable[..., RegionSnapshotsTransport] + ] = ( + RegionSnapshotsClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., RegionSnapshotsTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.compute_v1.RegionSnapshotsClient`.", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + 
else { + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "credentialsType": None, + }, + ) + + def delete_unary( + self, + request: Optional[Union[compute.DeleteRegionSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Deletes the specified Snapshot resource. Keep in mind + that deleting a single snapshot might not necessarily + delete all the data on that snapshot. If any data on the + snapshot that is marked for deletion is needed for + subsequent snapshots, the data will be moved to the next + corresponding snapshot. + + For more information, seeDeleting + snapshots. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.Delete. See the method + description for details. + project (str): + Project ID for this request. 
+ This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + delete. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteRegionSnapshotRequest): + request = compute.DeleteRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete( + self, + request: Optional[Union[compute.DeleteRegionSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes the specified Snapshot resource. Keep in mind + that deleting a single snapshot might not necessarily + delete all the data on that snapshot. If any data on the + snapshot that is marked for deletion is needed for + subsequent snapshots, the data will be moved to the next + corresponding snapshot. + + For more information, seeDeleting + snapshots. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.Delete. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + delete. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteRegionSnapshotRequest): + request = compute.DeleteRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. 
+ cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def get( + self, + request: Optional[Union[compute.GetRegionSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Snapshot: + r"""Returns the specified Snapshot resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.Get. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the Snapshot resource to + return. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.Snapshot: + Represents a Persistent Disk Snapshot + resource. + You can use snapshots to back up data on + a regular interval. 
For more + information, read Creating + persistent disk snapshots. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, snapshot] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetRegionSnapshotRequest): + request = compute.GetRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot is not None: + request.snapshot = snapshot + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_iam_policy( + self, + request: Optional[ + Union[compute.GetIamPolicyRegionSnapshotRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Gets the access control policy for a resource. May be + empty if no such policy or resource exists. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetIamPolicyRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.GetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, compute.GetIamPolicyRegionSnapshotRequest): + request = compute.GetIamPolicyRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary( + self, + request: Optional[Union[compute.InsertRegionSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot_resource: Optional[compute.Snapshot] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Creates a snapshot in the specified region using the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSnapshotRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_resource (google.cloud.compute_v1.types.Snapshot): + The body resource for this request + This corresponds to the ``snapshot_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, snapshot_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertRegionSnapshotRequest): + request = compute.InsertRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot_resource is not None: + request.snapshot_resource = snapshot_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert( + self, + request: Optional[Union[compute.InsertRegionSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot_resource: Optional[compute.Snapshot] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a snapshot in the specified region using the + data included in the request. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSnapshotRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.Insert. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ snapshot_resource (google.cloud.compute_v1.types.Snapshot): + The body resource for this request + This corresponds to the ``snapshot_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region, snapshot_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertRegionSnapshotRequest): + request = compute.InsertRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot_resource is not None: + request.snapshot_resource = snapshot_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list( + self, + request: Optional[Union[compute.ListRegionSnapshotsRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPager: + r"""Retrieves the list of Snapshot resources contained + within the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionSnapshotsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListRegionSnapshotsRequest, dict]): + The request object. A request message for + RegionSnapshots.List. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.services.region_snapshots.pagers.ListPager: + Contains a list of Snapshot + resources. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, region] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListRegionSnapshotsRequest): + request = compute.ListRegionSnapshotsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ) + ), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_iam_policy( + self, + request: Optional[ + Union[compute.SetIamPolicyRegionSnapshotRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_policy_request_resource: Optional[ + compute.RegionSetPolicyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Sets the access control policy on the specified + resource. Replaces any existing policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetIamPolicyRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.SetIamPolicy. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + This corresponds to the ``region_set_policy_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.Policy: + An Identity and Access Management (IAM) policy, which specifies access + controls for Google Cloud resources. + + A Policy is a collection of bindings. A binding binds + one or more members, or principals, to a single role. + Principals can be user accounts, service accounts, + Google groups, and domains (such as G Suite). A role + is a named list of permissions; each role can be an + IAM predefined role or a user-created custom role. + + For some types of Google Cloud resources, a binding + can also specify a condition, which is a logical + expression that allows access to a resource only if + the expression evaluates to true. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the [IAM + documentation](https://cloud.google.com/iam/help/conditions/resource-policies). 
+ + **JSON example:** + + :literal:`` { "bindings": [ { "role": "roles/resourcemanager.organizationAdmin", "members": [ "user:mike@example.com", "group:admins@example.com", "domain:google.com", "serviceAccount:my-project-id@appspot.gserviceaccount.com" ] }, { "role": "roles/resourcemanager.organizationViewer", "members": [ "user:eve@example.com" ], "condition": { "title": "expirable access", "description": "Does not grant access after Sep 2020", "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", } } ], "etag": "BwWWja0YfJA=", "version": 3 }`\ \` + + **YAML example:** + + :literal:`` bindings: - members: - user:mike@example.com - group:admins@example.com - domain:google.com - serviceAccount:my-project-id@appspot.gserviceaccount.com role: roles/resourcemanager.organizationAdmin - members: - user:eve@example.com role: roles/resourcemanager.organizationViewer condition: title: expirable access description: Does not grant access after Sep 2020 expression: request.time < timestamp('2020-10-01T00:00:00.000Z') etag: BwWWja0YfJA= version: 3`\ \` + + For a description of IAM and its features, see the + [IAM + documentation](https://cloud.google.com/iam/docs/). + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + resource, + region_set_policy_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, compute.SetIamPolicyRegionSnapshotRequest): + request = compute.SetIamPolicyRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_policy_request_resource is not None: + request.region_set_policy_request_resource = ( + region_set_policy_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_iam_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels_unary( + self, + request: Optional[Union[compute.SetLabelsRegionSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[ + compute.RegionSetLabelsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Sets the labels on a regional snapshot. To learn more + about labels, read the Labeling Resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + resource, + region_set_labels_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.SetLabelsRegionSnapshotRequest): + request = compute.SetLabelsRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = ( + region_set_labels_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def set_labels( + self, + request: Optional[Union[compute.SetLabelsRegionSnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + region_set_labels_request_resource: Optional[ + compute.RegionSetLabelsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Sets the labels on a regional snapshot. To learn more + about labels, read the Labeling Resources + documentation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_set_labels(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.SetLabelsRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.SetLabels. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + This corresponds to the ``region_set_labels_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + resource, + region_set_labels_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.SetLabelsRegionSnapshotRequest): + request = compute.SetLabelsRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if region_set_labels_request_resource is not None: + request.region_set_labels_request_resource = ( + region_set_labels_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.set_labels] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def test_iam_permissions( + self, + request: Optional[ + Union[compute.TestIamPermissionsRegionSnapshotRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + resource: Optional[str] = None, + test_permissions_request_resource: Optional[ + compute.TestPermissionsRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.TestPermissionsResponse: + r"""Returns permissions that a caller has on the + specified resource. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.TestIamPermissionsRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.TestIamPermissions. See + the method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + The name of the region for this + request. + + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + resource (str): + Name or id of the resource for this + request. + + This corresponds to the ``resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + This corresponds to the ``test_permissions_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.TestPermissionsResponse: + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + resource, + test_permissions_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.TestIamPermissionsRegionSnapshotRequest): + request = compute.TestIamPermissionsRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if resource is not None: + request.resource = resource + if test_permissions_request_resource is not None: + request.test_permissions_request_resource = ( + test_permissions_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("resource", request.resource), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_kms_key_unary( + self, + request: Optional[ + Union[compute.UpdateKmsKeyRegionSnapshotRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot: Optional[str] = None, + region_snapshot_update_kms_key_request_resource: Optional[ + compute.RegionSnapshotUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_kms_key(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeyRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateKmsKeyRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.UpdateKmsKey. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + update. Should conform to RFC1035. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_snapshot_update_kms_key_request_resource (google.cloud.compute_v1.types.RegionSnapshotUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``region_snapshot_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + snapshot, + region_snapshot_update_kms_key_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyRegionSnapshotRequest): + request = compute.UpdateKmsKeyRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot is not None: + request.snapshot = snapshot + if region_snapshot_update_kms_key_request_resource is not None: + request.region_snapshot_update_kms_key_request_resource = ( + region_snapshot_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_kms_key( + self, + request: Optional[ + Union[compute.UpdateKmsKeyRegionSnapshotRequest, dict] + ] = None, + *, + project: Optional[str] = None, + region: Optional[str] = None, + snapshot: Optional[str] = None, + region_snapshot_update_kms_key_request_resource: Optional[ + compute.RegionSnapshotUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_kms_key(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeyRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateKmsKeyRegionSnapshotRequest, dict]): + The request object. A request message for + RegionSnapshots.UpdateKmsKey. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region (str): + Name of the region for this request. + This corresponds to the ``region`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + update. Should conform to RFC1035. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + region_snapshot_update_kms_key_request_resource (google.cloud.compute_v1.types.RegionSnapshotUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``region_snapshot_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + region, + snapshot, + region_snapshot_update_kms_key_request_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeyRegionSnapshotRequest): + request = compute.UpdateKmsKeyRegionSnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if region is not None: + request.region = region + if snapshot is not None: + request.snapshot = snapshot + if region_snapshot_update_kms_key_request_resource is not None: + request.region_snapshot_update_kms_key_request_resource = ( + region_snapshot_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("region", request.region), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._region_operations_client + operation_request = compute.GetRegionOperationRequest() + operation_request.project = request.project + operation_request.region = request.region + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "RegionSnapshotsClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! 
+ """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("RegionSnapshotsClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/pagers.py new file mode 100644 index 000000000000..230ecacddff0 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/pagers.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.SnapshotList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.SnapshotList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.SnapshotList], + request: compute.ListRegionSnapshotsRequest, + response: compute.SnapshotList, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.compute_v1.types.ListRegionSnapshotsRequest): + The initial request object. 
+ response (google.cloud.compute_v1.types.SnapshotList): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = compute.ListRegionSnapshotsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.SnapshotList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[compute.Snapshot]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/README.rst b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/README.rst new file mode 100644 index 000000000000..cf2b3e5d77f5 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/README.rst @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + +``RegionSnapshotsTransport`` is the ABC for all transports. + +- public child ``RegionSnapshotsGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). 
+- public child ``RegionSnapshotsGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseRegionSnapshotsRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). +- public child ``RegionSnapshotsRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/__init__.py new file mode 100644 index 000000000000..af9543b7fc77 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import RegionSnapshotsTransport +from .rest import RegionSnapshotsRestInterceptor, RegionSnapshotsRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[RegionSnapshotsTransport]] +_transport_registry["rest"] = RegionSnapshotsRestTransport + +__all__ = ( + "RegionSnapshotsTransport", + "RegionSnapshotsRestTransport", + "RegionSnapshotsRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/base.py new file mode 100644 index 000000000000..692e0fa93e62 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/base.py @@ -0,0 +1,336 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +import google.auth # type: ignore +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version +from google.cloud.compute_v1.services import region_operations +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionSnapshotsTransport(abc.ABC): + """Abstract transport class for RegionSnapshots.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "compute.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + self._extended_operations_services: Dict[str, Any] = {} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + self._wrapped_methods: Dict[Callable, Callable] = {} + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=600.0, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_iam_policy: gapic_v1.method.wrap_method( + self.get_iam_policy, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=600.0, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.set_iam_policy: gapic_v1.method.wrap_method( + self.set_iam_policy, + default_timeout=600.0, + client_info=client_info, + ), + self.set_labels: gapic_v1.method.wrap_method( + self.set_labels, + default_timeout=600.0, + client_info=client_info, + ), + self.test_iam_permissions: gapic_v1.method.wrap_method( + self.test_iam_permissions, + default_timeout=600.0, + client_info=client_info, + ), + self.update_kms_key: gapic_v1.method.wrap_method( + self.update_kms_key, + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteRegionSnapshotRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetRegionSnapshotRequest], + Union[compute.Snapshot, Awaitable[compute.Snapshot]], + ]: + raise NotImplementedError() + + @property + def get_iam_policy( + self, + ) -> Callable[ + [compute.GetIamPolicyRegionSnapshotRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertRegionSnapshotRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def list( + self, + ) -> Callable[ + [compute.ListRegionSnapshotsRequest], + Union[compute.SnapshotList, Awaitable[compute.SnapshotList]], + ]: + raise NotImplementedError() + + @property + def set_iam_policy( + self, + ) -> Callable[ + [compute.SetIamPolicyRegionSnapshotRequest], + Union[compute.Policy, Awaitable[compute.Policy]], + ]: + raise NotImplementedError() + + @property + def set_labels( + self, + ) -> Callable[ + [compute.SetLabelsRegionSnapshotRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsRegionSnapshotRequest], + Union[ + compute.TestPermissionsResponse, Awaitable[compute.TestPermissionsResponse] + ], + ]: + raise NotImplementedError() + + @property + def update_kms_key( + self, + ) -> Callable[ + [compute.UpdateKmsKeyRegionSnapshotRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise 
NotImplementedError() + + @property + def _region_operations_client(self) -> region_operations.RegionOperationsClient: + ex_op_service = self._extended_operations_services.get("region_operations") + if not ex_op_service: + ex_op_service = region_operations.RegionOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["region_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ("RegionSnapshotsTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/rest.py new file mode 100644 index 000000000000..1fc5c7763b99 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/rest.py @@ -0,0 +1,2379 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseRegionSnapshotsRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class RegionSnapshotsRestInterceptor: + """Interceptor for RegionSnapshots. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the RegionSnapshotsRestTransport. + + .. code-block:: python + class MyCustomRegionSnapshotsInterceptor(RegionSnapshotsRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_iam_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_iam_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_set_labels(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_set_labels(self, response): + logging.log(f"Received response: {response}") + return response + + def 
pre_test_iam_permissions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_test_iam_permissions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_kms_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_kms_key(self, response): + logging.log(f"Received response: {response}") + return response + + transport = RegionSnapshotsRestTransport(interceptor=MyCustomRegionSnapshotsInterceptor()) + client = RegionSnapshotsClient(transport=transport) + + + """ + + def pre_delete( + self, + request: compute.DeleteRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteRegionSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. + """ + return response + + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. 
+ + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. + """ + return response, metadata + + def pre_get( + self, + request: compute.GetRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetRegionSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_get(self, response: compute.Snapshot) -> compute.Snapshot: + """Post-rpc interceptor for get + + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. + """ + return response + + def post_get_with_metadata( + self, + response: compute.Snapshot, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Snapshot, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. 
The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. + """ + return response, metadata + + def pre_get_iam_policy( + self, + request: compute.GetIamPolicyRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetIamPolicyRegionSnapshotRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_get_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for get_iam_policy + + DEPRECATED. Please use the `post_get_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_get_iam_policy` interceptor runs + before the `post_get_iam_policy_with_metadata` interceptor. + """ + return response + + def post_get_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_get_iam_policy_with_metadata` + interceptor in new development instead of the `post_get_iam_policy` interceptor. + When both interceptors are used, this `post_get_iam_policy_with_metadata` interceptor runs after the + `post_get_iam_policy` interceptor. The (possibly modified) response returned by + `post_get_iam_policy` will be passed to + `post_get_iam_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_insert( + self, + request: compute.InsertRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InsertRegionSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. + """ + return response + + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + + def pre_list( + self, + request: compute.ListRegionSnapshotsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListRegionSnapshotsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_list(self, response: compute.SnapshotList) -> compute.SnapshotList: + """Post-rpc interceptor for list + + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. + """ + return response + + def post_list_with_metadata( + self, + response: compute.SnapshotList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.SnapshotList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + + def pre_set_iam_policy( + self, + request: compute.SetIamPolicyRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SetIamPolicyRegionSnapshotRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for set_iam_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_set_iam_policy(self, response: compute.Policy) -> compute.Policy: + """Post-rpc interceptor for set_iam_policy + + DEPRECATED. Please use the `post_set_iam_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_set_iam_policy` interceptor runs + before the `post_set_iam_policy_with_metadata` interceptor. + """ + return response + + def post_set_iam_policy_with_metadata( + self, + response: compute.Policy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Policy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_iam_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_set_iam_policy_with_metadata` + interceptor in new development instead of the `post_set_iam_policy` interceptor. + When both interceptors are used, this `post_set_iam_policy_with_metadata` interceptor runs after the + `post_set_iam_policy` interceptor. The (possibly modified) response returned by + `post_set_iam_policy` will be passed to + `post_set_iam_policy_with_metadata`. 
+ """ + return response, metadata + + def pre_set_labels( + self, + request: compute.SetLabelsRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.SetLabelsRegionSnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for set_labels + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_set_labels(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for set_labels + + DEPRECATED. Please use the `post_set_labels_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_set_labels` interceptor runs + before the `post_set_labels_with_metadata` interceptor. + """ + return response + + def post_set_labels_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for set_labels + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_set_labels_with_metadata` + interceptor in new development instead of the `post_set_labels` interceptor. + When both interceptors are used, this `post_set_labels_with_metadata` interceptor runs after the + `post_set_labels` interceptor. The (possibly modified) response returned by + `post_set_labels` will be passed to + `post_set_labels_with_metadata`. 
+ """ + return response, metadata + + def pre_test_iam_permissions( + self, + request: compute.TestIamPermissionsRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestIamPermissionsRegionSnapshotRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for test_iam_permissions + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_test_iam_permissions( + self, response: compute.TestPermissionsResponse + ) -> compute.TestPermissionsResponse: + """Post-rpc interceptor for test_iam_permissions + + DEPRECATED. Please use the `post_test_iam_permissions_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_test_iam_permissions` interceptor runs + before the `post_test_iam_permissions_with_metadata` interceptor. + """ + return response + + def post_test_iam_permissions_with_metadata( + self, + response: compute.TestPermissionsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.TestPermissionsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for test_iam_permissions + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_test_iam_permissions_with_metadata` + interceptor in new development instead of the `post_test_iam_permissions` interceptor. + When both interceptors are used, this `post_test_iam_permissions_with_metadata` interceptor runs after the + `post_test_iam_permissions` interceptor. The (possibly modified) response returned by + `post_test_iam_permissions` will be passed to + `post_test_iam_permissions_with_metadata`. 
+ """ + return response, metadata + + def pre_update_kms_key( + self, + request: compute.UpdateKmsKeyRegionSnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateKmsKeyRegionSnapshotRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_kms_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the RegionSnapshots server. + """ + return request, metadata + + def post_update_kms_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_kms_key + + DEPRECATED. Please use the `post_update_kms_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the RegionSnapshots server but before + it is returned to user code. This `post_update_kms_key` interceptor runs + before the `post_update_kms_key_with_metadata` interceptor. + """ + return response + + def post_update_kms_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_kms_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the RegionSnapshots server but before it is returned to user code. + + We recommend only using this `post_update_kms_key_with_metadata` + interceptor in new development instead of the `post_update_kms_key` interceptor. + When both interceptors are used, this `post_update_kms_key_with_metadata` interceptor runs after the + `post_update_kms_key` interceptor. The (possibly modified) response returned by + `post_update_kms_key` will be passed to + `post_update_kms_key_with_metadata`. 
+ """ + return response, metadata + + +@dataclasses.dataclass +class RegionSnapshotsRestStub: + _session: AuthorizedSession + _host: str + _interceptor: RegionSnapshotsRestInterceptor + + +class RegionSnapshotsRestTransport(_BaseRegionSnapshotsRestTransport): + """REST backend synchronous transport for RegionSnapshots. + + The RegionSnapshots API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[RegionSnapshotsRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. 
This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[RegionSnapshotsRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or RegionSnapshotsRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete( + _BaseRegionSnapshotsRestTransport._BaseDelete, RegionSnapshotsRestStub + ): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.Delete") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.DeleteRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.Delete. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. + + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseDelete._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete(request, metadata) + transcoded_request = ( + _BaseRegionSnapshotsRestTransport._BaseDelete._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseRegionSnapshotsRestTransport._BaseDelete._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.compute_v1.RegionSnapshotsClient.Delete", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "Delete", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._Delete._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.delete", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "Delete", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Get(_BaseRegionSnapshotsRestTransport._BaseGet, RegionSnapshotsRestStub): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.Get") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + 
headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Snapshot: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.Get. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Snapshot: + Represents a Persistent Disk Snapshot + resource. + You can use snapshots to back up data on + a regular interval. For more + information, read Creating + persistent disk snapshots. 
+ + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseGet._get_http_options() + ) + + request, metadata = self._interceptor.pre_get(request, metadata) + transcoded_request = ( + _BaseRegionSnapshotsRestTransport._BaseGet._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseRegionSnapshotsRestTransport._BaseGet._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotsClient.Get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "Get", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._Get._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Snapshot() + pb_resp = compute.Snapshot.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Snapshot.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.get", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "Get", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetIamPolicy( + _BaseRegionSnapshotsRestTransport._BaseGetIamPolicy, RegionSnapshotsRestStub + ): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.GetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetIamPolicyRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + 
r"""Call the get iam policy method over HTTP. + + Args: + request (~.compute.GetIamPolicyRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.GetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseGetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_iam_policy(request, metadata) + transcoded_request = _BaseRegionSnapshotsRestTransport._BaseGetIamPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseRegionSnapshotsRestTransport._BaseGetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotsClient.GetIamPolicy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "GetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._GetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Policy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.get_iam_policy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "GetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Insert( + _BaseRegionSnapshotsRestTransport._BaseInsert, RegionSnapshotsRestStub + ): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.Insert") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.InsertRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), 
+ ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.Insert. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseInsert._get_http_options() + ) + + request, metadata = self._interceptor.pre_insert(request, metadata) + transcoded_request = ( + _BaseRegionSnapshotsRestTransport._BaseInsert._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseRegionSnapshotsRestTransport._BaseInsert._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseRegionSnapshotsRestTransport._BaseInsert._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotsClient.Insert", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "Insert", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._Insert._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.insert", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "Insert", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _List(_BaseRegionSnapshotsRestTransport._BaseList, RegionSnapshotsRestStub): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.List") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.ListRegionSnapshotsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.SnapshotList: + r"""Call the 
list method over HTTP. + + Args: + request (~.compute.ListRegionSnapshotsRequest): + The request object. A request message for + RegionSnapshots.List. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.SnapshotList: + Contains a list of Snapshot + resources. + + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseList._get_http_options() + ) + + request, metadata = self._interceptor.pre_list(request, metadata) + transcoded_request = ( + _BaseRegionSnapshotsRestTransport._BaseList._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseRegionSnapshotsRestTransport._BaseList._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotsClient.List", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "List", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._List._get_response( + self._host, + metadata, + query_params, + 
self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.SnapshotList() + pb_resp = compute.SnapshotList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.SnapshotList.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.list", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "List", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetIamPolicy( + _BaseRegionSnapshotsRestTransport._BaseSetIamPolicy, RegionSnapshotsRestStub + ): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.SetIamPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
compute.SetIamPolicyRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Policy: + r"""Call the set iam policy method over HTTP. + + Args: + request (~.compute.SetIamPolicyRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.SetIamPolicy. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Policy: + An Identity and Access Management (IAM) policy, which + specifies access controls for Google Cloud resources. + + A ``Policy`` is a collection of ``bindings``. A + ``binding`` binds one or more ``members``, or + principals, to a single ``role``. Principals can be user + accounts, service accounts, Google groups, and domains + (such as G Suite). A ``role`` is a named list of + permissions; each ``role`` can be an IAM predefined role + or a user-created custom role. + + For some types of Google Cloud resources, a ``binding`` + can also specify a ``condition``, which is a logical + expression that allows access to a resource only if the + expression evaluates to ``true``. A condition can add + constraints based on attributes of the request, the + resource, or both. To learn which resources support + conditions in their IAM policies, see the `IAM + documentation `__. 
+ + **JSON example:** + + :: + + { + "bindings": [ + { + "role": "roles/resourcemanager.organizationAdmin", + "members": [ + "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + }, + { + "role": "roles/resourcemanager.organizationViewer", + "members": [ + "user:eve@example.com" + ], + "condition": { + "title": "expirable access", + "description": "Does not grant access after Sep 2020", + "expression": "request.time < timestamp('2020-10-01T00:00:00.000Z')", + } + } + ], + "etag": "BwWWja0YfJA=", + "version": 3 + } + + **YAML example:** + + :: + + bindings: + - members: + - user:mike@example.com + - group:admins@example.com + - domain:google.com + - serviceAccount:my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin + - members: + - user:eve@example.com + role: roles/resourcemanager.organizationViewer + condition: + title: expirable access + description: Does not grant access after Sep 2020 + expression: request.time < timestamp('2020-10-01T00:00:00.000Z') + etag: BwWWja0YfJA= + version: 3 + + For a description of IAM and its features, see the `IAM + documentation `__. 
+ + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseSetIamPolicy._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_iam_policy(request, metadata) + transcoded_request = _BaseRegionSnapshotsRestTransport._BaseSetIamPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionSnapshotsRestTransport._BaseSetIamPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionSnapshotsRestTransport._BaseSetIamPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotsClient.SetIamPolicy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "SetIamPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._SetIamPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Policy() + pb_resp = compute.Policy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_iam_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_iam_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Policy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.set_iam_policy", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "SetIamPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SetLabels( + _BaseRegionSnapshotsRestTransport._BaseSetLabels, RegionSnapshotsRestStub + ): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.SetLabels") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.SetLabelsRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), + ) -> compute.Operation: + r"""Call the set labels method over HTTP. + + Args: + request (~.compute.SetLabelsRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.SetLabels. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseSetLabels._get_http_options() + ) + + request, metadata = self._interceptor.pre_set_labels(request, metadata) + transcoded_request = _BaseRegionSnapshotsRestTransport._BaseSetLabels._get_transcoded_request( + http_options, request + ) + + body = ( + _BaseRegionSnapshotsRestTransport._BaseSetLabels._get_request_body_json( + transcoded_request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseRegionSnapshotsRestTransport._BaseSetLabels._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotsClient.SetLabels", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "SetLabels", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._SetLabels._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_set_labels(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_set_labels_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.set_labels", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "SetLabels", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _TestIamPermissions( + _BaseRegionSnapshotsRestTransport._BaseTestIamPermissions, + RegionSnapshotsRestStub, + ): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.TestIamPermissions") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.TestIamPermissionsRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.TestPermissionsResponse: + r"""Call the test iam permissions method over HTTP. + + Args: + request (~.compute.TestIamPermissionsRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.TestIamPermissions. See + the method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.TestPermissionsResponse: + + """ + + http_options = _BaseRegionSnapshotsRestTransport._BaseTestIamPermissions._get_http_options() + + request, metadata = self._interceptor.pre_test_iam_permissions( + request, metadata + ) + transcoded_request = _BaseRegionSnapshotsRestTransport._BaseTestIamPermissions._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionSnapshotsRestTransport._BaseTestIamPermissions._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionSnapshotsRestTransport._BaseTestIamPermissions._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotsClient.TestIamPermissions", + extra={ + 
"serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "TestIamPermissions", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._TestIamPermissions._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.TestPermissionsResponse() + pb_resp = compute.TestPermissionsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_test_iam_permissions(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_test_iam_permissions_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.TestPermissionsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.test_iam_permissions", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "TestIamPermissions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateKmsKey( + _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey, RegionSnapshotsRestStub + ): + def __hash__(self): + return hash("RegionSnapshotsRestTransport.UpdateKmsKey") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = 
transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateKmsKeyRegionSnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update kms key method over HTTP. + + Args: + request (~.compute.UpdateKmsKeyRegionSnapshotRequest): + The request object. A request message for + RegionSnapshots.UpdateKmsKey. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = ( + _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_kms_key(request, metadata) + transcoded_request = _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_transcoded_request( + http_options, request + ) + + body = _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.RegionSnapshotsClient.UpdateKmsKey", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "UpdateKmsKey", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = RegionSnapshotsRestTransport._UpdateKmsKey._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_kms_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_kms_key_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.RegionSnapshotsClient.update_kms_key", + extra={ + "serviceName": "google.cloud.compute.v1.RegionSnapshots", + "rpcName": "UpdateKmsKey", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def delete( + self, + ) -> Callable[[compute.DeleteRegionSnapshotRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get(self) -> Callable[[compute.GetRegionSnapshotRequest], compute.Snapshot]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_iam_policy( + self, + ) -> Callable[[compute.GetIamPolicyRegionSnapshotRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert( + self, + ) -> Callable[[compute.InsertRegionSnapshotRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list( + self, + ) -> Callable[[compute.ListRegionSnapshotsRequest], compute.SnapshotList]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_iam_policy( + self, + ) -> Callable[[compute.SetIamPolicyRegionSnapshotRequest], compute.Policy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def set_labels( + self, + ) -> Callable[[compute.SetLabelsRegionSnapshotRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SetLabels(self._session, self._host, self._interceptor) # type: ignore + + @property + def test_iam_permissions( + self, + ) -> Callable[ + [compute.TestIamPermissionsRegionSnapshotRequest], + compute.TestPermissionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_kms_key( + self, + ) -> Callable[[compute.UpdateKmsKeyRegionSnapshotRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateKmsKey(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("RegionSnapshotsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/rest_base.py new file mode 100644 index 000000000000..b89e2da462d5 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/region_snapshots/transports/rest_base.py @@ -0,0 +1,555 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.protobuf import json_format + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO, RegionSnapshotsTransport + + +class _BaseRegionSnapshotsRestTransport(RegionSnapshotsTransport): + """Base REST backend transport for RegionSnapshots. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. 
maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host)
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshots/{snapshot}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetRegionSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseGet._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/getIamPolicy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetIamPolicyRegionSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) 
+ ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseGetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseInsert: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshots", + "body": "snapshot_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.InsertRegionSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseInsert._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] 
= [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshots", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.ListRegionSnapshotsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseList._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseSetIamPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/setIamPolicy", + "body": "region_set_policy_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.SetIamPolicyRegionSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + 
use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseSetIamPolicy._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseSetLabels: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/setLabels", + "body": "region_set_labels_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.SetLabelsRegionSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseSetLabels._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseTestIamPermissions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in 
message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/testIamPermissions", + "body": "test_permissions_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.TestIamPermissionsRegionSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseTestIamPermissions._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseUpdateKmsKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/regions/{region}/snapshots/{snapshot}/updateKmsKey", + "body": "region_snapshot_update_kms_key_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateKmsKeyRegionSnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, 
pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseRegionSnapshotsRestTransport._BaseUpdateKmsKey._get_unset_required_fields( + query_params + ) + ) + + return query_params + + +__all__ = ("_BaseRegionSnapshotsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py index e936a4b591c9..2e200e7278e1 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/client.py @@ -2252,6 +2252,304 @@ def sample_test_iam_permissions(): # Done; return the response. return response + def update_kms_key_unary( + self, + request: Optional[Union[compute.UpdateKmsKeySnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + snapshot: Optional[str] = None, + snapshot_update_kms_key_request_resource: Optional[ + compute.SnapshotUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_kms_key(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeySnapshotRequest( + project="project_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateKmsKeySnapshotRequest, dict]): + The request object. A request message for + Snapshots.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + update. Should conform to RFC1035. + + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_update_kms_key_request_resource (google.cloud.compute_v1.types.SnapshotUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``snapshot_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, snapshot, snapshot_update_kms_key_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeySnapshotRequest): + request = compute.UpdateKmsKeySnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if snapshot is not None: + request.snapshot = snapshot + if snapshot_update_kms_key_request_resource is not None: + request.snapshot_update_kms_key_request_resource = ( + snapshot_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_kms_key( + self, + request: Optional[Union[compute.UpdateKmsKeySnapshotRequest, dict]] = None, + *, + project: Optional[str] = None, + snapshot: Optional[str] = None, + snapshot_update_kms_key_request_resource: Optional[ + compute.SnapshotUpdateKmsKeyRequest + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Rotates the customer-managed + encryption key to the latest version for the specified + snapshot. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update_kms_key(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeySnapshotRequest( + project="project_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateKmsKeySnapshotRequest, dict]): + The request object. A request message for + Snapshots.UpdateKmsKey. See the method + description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot (str): + Name of the snapshot resource to + update. Should conform to RFC1035. 
+ + This corresponds to the ``snapshot`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + snapshot_update_kms_key_request_resource (google.cloud.compute_v1.types.SnapshotUpdateKmsKeyRequest): + The body resource for this request + This corresponds to the ``snapshot_update_kms_key_request_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, snapshot, snapshot_update_kms_key_request_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateKmsKeySnapshotRequest): + request = compute.UpdateKmsKeySnapshotRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if snapshot is not None: + request.snapshot = snapshot + if snapshot_update_kms_key_request_resource is not None: + request.snapshot_update_kms_key_request_resource = ( + snapshot_update_kms_key_request_resource + ) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_kms_key] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("snapshot", request.snapshot), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._global_operations_client + operation_request = compute.GetGlobalOperationRequest() + operation_request.project = request.project + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + def __enter__(self) -> "SnapshotsClient": return self diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/base.py index 9b1cb17edbe6..f13108ecaf34 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/base.py @@ -217,6 +217,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=600.0, client_info=client_info, ), + self.update_kms_key: gapic_v1.method.wrap_method( + self.update_kms_key, + default_timeout=600.0, + client_info=client_info, + ), } def close(self): @@ -302,6 +307,15 @@ def test_iam_permissions( ]: raise NotImplementedError() + @property + def update_kms_key( + self, + ) -> Callable[ + [compute.UpdateKmsKeySnapshotRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py index db11ebf78027..a0ff27d3ebf8 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest.py @@ -136,6 +136,14 @@ def post_test_iam_permissions(self, response): logging.log(f"Received response: {response}") return response + def pre_update_kms_key(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_kms_key(self, response): + logging.log(f"Received response: {response}") + return response + transport = SnapshotsRestTransport(interceptor=MyCustomSnapshotsInterceptor()) 
client = SnapshotsClient(transport=transport) @@ -507,6 +515,52 @@ def post_test_iam_permissions_with_metadata( """ return response, metadata + def pre_update_kms_key( + self, + request: compute.UpdateKmsKeySnapshotRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateKmsKeySnapshotRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_kms_key + + Override in a subclass to manipulate the request or metadata + before they are sent to the Snapshots server. + """ + return request, metadata + + def post_update_kms_key(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update_kms_key + + DEPRECATED. Please use the `post_update_kms_key_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Snapshots server but before + it is returned to user code. This `post_update_kms_key` interceptor runs + before the `post_update_kms_key_with_metadata` interceptor. + """ + return response + + def post_update_kms_key_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_kms_key + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Snapshots server but before it is returned to user code. + + We recommend only using this `post_update_kms_key_with_metadata` + interceptor in new development instead of the `post_update_kms_key` interceptor. + When both interceptors are used, this `post_update_kms_key_with_metadata` interceptor runs after the + `post_update_kms_key` interceptor. The (possibly modified) response returned by + `post_update_kms_key` will be passed to + `post_update_kms_key_with_metadata`. 
+ """ + return response, metadata + @dataclasses.dataclass class SnapshotsRestStub: @@ -2039,6 +2093,190 @@ def __call__( ) return resp + class _UpdateKmsKey( + _BaseSnapshotsRestTransport._BaseUpdateKmsKey, SnapshotsRestStub + ): + def __hash__(self): + return hash("SnapshotsRestTransport.UpdateKmsKey") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateKmsKeySnapshotRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update kms key method over HTTP. + + Args: + request (~.compute.UpdateKmsKeySnapshotRequest): + The request object. A request message for + Snapshots.UpdateKmsKey. See the method + description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. 
For more information, read Handling API
core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_kms_key(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_kms_key_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.SnapshotsClient.update_kms_key", + extra={ + "serviceName": "google.cloud.compute.v1.Snapshots", + "rpcName": "UpdateKmsKey", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def delete(self) -> Callable[[compute.DeleteSnapshotRequest], compute.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -2097,6 +2335,14 @@ def test_iam_permissions( # In C++ this would require a dynamic_cast return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + @property + def update_kms_key( + self, + ) -> Callable[[compute.UpdateKmsKeySnapshotRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateKmsKey(self._session, self._host, self._interceptor) # type: ignore + @property def kind(self) -> str: return "rest" diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest_base.py index 97de33c21c59..e8b25c25feff 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest_base.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/snapshots/transports/rest_base.py @@ -495,5 +495,61 @@ def _get_query_params_json(transcoded_request): return query_params + class _BaseUpdateKmsKey: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/global/snapshots/{snapshot}/updateKmsKey", + "body": "snapshot_update_kms_key_request_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateKmsKeySnapshotRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + 
use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseSnapshotsRestTransport._BaseUpdateKmsKey._get_unset_required_fields( + query_params + ) + ) + + return query_params + __all__ = ("_BaseSnapshotsRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/__init__.py new file mode 100644 index 000000000000..2102bc0bf7fb --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/__init__.py @@ -0,0 +1,18 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import ZoneVmExtensionPoliciesClient + +__all__ = ("ZoneVmExtensionPoliciesClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/client.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/client.py new file mode 100644 index 000000000000..c6cccaf4fa8d --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/client.py @@ -0,0 +1,1914 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import functools +import json +import logging as std_logging +import os +import re +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import extended_operation, gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.api_core.extended_operation as extended_operation # type: ignore + +from google.cloud.compute_v1.services.zone_vm_extension_policies import pagers +from 
google.cloud.compute_v1.types import compute + +from .transports.base import DEFAULT_CLIENT_INFO, ZoneVmExtensionPoliciesTransport +from .transports.rest import ZoneVmExtensionPoliciesRestTransport + + +class ZoneVmExtensionPoliciesClientMeta(type): + """Metaclass for the ZoneVmExtensionPolicies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[ZoneVmExtensionPoliciesTransport]] + _transport_registry["rest"] = ZoneVmExtensionPoliciesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[ZoneVmExtensionPoliciesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class ZoneVmExtensionPoliciesClient(metaclass=ZoneVmExtensionPoliciesClientMeta): + """The ZoneVmExtensionPolicies API.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. + return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "compute.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "compute.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ZoneVmExtensionPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + ZoneVmExtensionPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> ZoneVmExtensionPoliciesTransport: + """Returns the transport used by the client instance. 
+ + Returns: + ZoneVmExtensionPoliciesTransport: The transport used by the client + instance. + """ + return self._transport + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + 
return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. 
Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = ZoneVmExtensionPoliciesClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. 
+ """ + use_client_cert = ZoneVmExtensionPoliciesClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. + """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. 
+ """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = ZoneVmExtensionPoliciesClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = ZoneVmExtensionPoliciesClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = ( + ZoneVmExtensionPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. + + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = ZoneVmExtensionPoliciesClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. 
+ return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, + ZoneVmExtensionPoliciesTransport, + Callable[..., ZoneVmExtensionPoliciesTransport], + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the zone vm extension policies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. 
+ transport (Optional[Union[str,ZoneVmExtensionPoliciesTransport,Callable[..., ZoneVmExtensionPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the ZoneVmExtensionPoliciesTransport constructor. + If set to None, a transport is chosen automatically. + NOTE: "rest" transport functionality is currently in a + beta state (preview). We welcome your feedback via an + issue in this library's source repository. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. 
+ + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + ZoneVmExtensionPoliciesClient._read_environment_variables() + ) + self._client_cert_source = ( + ZoneVmExtensionPoliciesClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + ) + self._universe_domain = ZoneVmExtensionPoliciesClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. + self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. 
+ transport_provided = isinstance(transport, ZoneVmExtensionPoliciesTransport) + if transport_provided: + # transport is a ZoneVmExtensionPoliciesTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." + ) + self._transport = cast(ZoneVmExtensionPoliciesTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or ZoneVmExtensionPoliciesClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[ZoneVmExtensionPoliciesTransport], + Callable[..., ZoneVmExtensionPoliciesTransport], + ] = ( + ZoneVmExtensionPoliciesClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., ZoneVmExtensionPoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO 
COVER + _LOGGER.debug( + "Created client `google.cloud.compute_v1.ZoneVmExtensionPoliciesClient`.", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "credentialsType": None, + }, + ) + + def delete_unary( + self, + request: Optional[ + Union[compute.DeleteZoneVmExtensionPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + vm_extension_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Deletes a specified zone VM extension policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteZoneVmExtensionPolicyRequest( + project="project_value", + vm_extension_policy="vm_extension_policy_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteZoneVmExtensionPolicyRequest, dict]): + The request object. A request message for + ZoneVmExtensionPolicies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vm_extension_policy (str): + Name of the zone VM extension policy + to delete. + + This corresponds to the ``vm_extension_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, vm_extension_policy] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteZoneVmExtensionPolicyRequest): + request = compute.DeleteZoneVmExtensionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if vm_extension_policy is not None: + request.vm_extension_policy = vm_extension_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("vm_extension_policy", request.vm_extension_policy), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def delete( + self, + request: Optional[ + Union[compute.DeleteZoneVmExtensionPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + vm_extension_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Deletes a specified zone VM extension policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_delete(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteZoneVmExtensionPolicyRequest( + project="project_value", + vm_extension_policy="vm_extension_policy_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.DeleteZoneVmExtensionPolicyRequest, dict]): + The request object. A request message for + ZoneVmExtensionPolicies.Delete. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ vm_extension_policy (str): + Name of the zone VM extension policy + to delete. + + This corresponds to the ``vm_extension_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, vm_extension_policy] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.DeleteZoneVmExtensionPolicyRequest): + request = compute.DeleteZoneVmExtensionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if vm_extension_policy is not None: + request.vm_extension_policy = vm_extension_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.delete] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("vm_extension_policy", request.vm_extension_policy), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def get( + self, + request: Optional[Union[compute.GetZoneVmExtensionPolicyRequest, dict]] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + vm_extension_policy: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.VmExtensionPolicy: + r"""Retrieves details of a specific zone VM extension + policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_get(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetZoneVmExtensionPolicyRequest( + project="project_value", + vm_extension_policy="vm_extension_policy_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.GetZoneVmExtensionPolicyRequest, dict]): + The request object. A request message for + ZoneVmExtensionPolicies.Get. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ vm_extension_policy (str): + Name of the VM extension policy + resource to return. + + This corresponds to the ``vm_extension_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.types.VmExtensionPolicy: + Represents a VM extension policy. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, vm_extension_policy] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.GetZoneVmExtensionPolicyRequest): + request = compute.GetZoneVmExtensionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if vm_extension_policy is not None: + request.vm_extension_policy = vm_extension_policy + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("vm_extension_policy", request.vm_extension_policy), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def insert_unary( + self, + request: Optional[ + Union[compute.InsertZoneVmExtensionPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + vm_extension_policy_resource: Optional[compute.VmExtensionPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Creates a new zone-level VM extension policy within a + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertZoneVmExtensionPolicyRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertZoneVmExtensionPolicyRequest, dict]): + The request object. A request message for + ZoneVmExtensionPolicies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vm_extension_policy_resource (google.cloud.compute_v1.types.VmExtensionPolicy): + The body resource for this request + This corresponds to the ``vm_extension_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, vm_extension_policy_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertZoneVmExtensionPolicyRequest): + request = compute.InsertZoneVmExtensionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if vm_extension_policy_resource is not None: + request.vm_extension_policy_resource = vm_extension_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def insert( + self, + request: Optional[ + Union[compute.InsertZoneVmExtensionPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + vm_extension_policy_resource: Optional[compute.VmExtensionPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Creates a new zone-level VM extension policy within a + project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_insert(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertZoneVmExtensionPolicyRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.InsertZoneVmExtensionPolicyRequest, dict]): + The request object. A request message for + ZoneVmExtensionPolicies.Insert. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ vm_extension_policy_resource (google.cloud.compute_v1.types.VmExtensionPolicy): + The body resource for this request + This corresponds to the ``vm_extension_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone, vm_extension_policy_resource] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.InsertZoneVmExtensionPolicyRequest): + request = compute.InsertZoneVmExtensionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if vm_extension_policy_resource is not None: + request.vm_extension_policy_resource = vm_extension_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.insert] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. + class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. 
+ return response + + def list( + self, + request: Optional[ + Union[compute.ListZoneVmExtensionPoliciesRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListPager: + r"""Lists all VM extension policies within a specific + zone for a project. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_list(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListZoneVmExtensionPoliciesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.ListZoneVmExtensionPoliciesRequest, dict]): + The request object. A request message for + ZoneVmExtensionPolicies.List. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.compute_v1.services.zone_vm_extension_policies.pagers.ListPager: + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [project, zone] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.ListZoneVmExtensionPoliciesRequest): + request = compute.ListZoneVmExtensionPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_unary( + self, + request: Optional[ + Union[compute.UpdateZoneVmExtensionPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + vm_extension_policy: Optional[str] = None, + vm_extension_policy_resource: Optional[compute.VmExtensionPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Modifies an existing zone VM extension policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateZoneVmExtensionPolicyRequest( + project="project_value", + vm_extension_policy="vm_extension_policy_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateZoneVmExtensionPolicyRequest, dict]): + The request object. 
A request message for + ZoneVmExtensionPolicies.Update. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. + This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vm_extension_policy (str): + Name of the zone VM extension policy + to update. + + This corresponds to the ``vm_extension_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vm_extension_policy_resource (google.cloud.compute_v1.types.VmExtensionPolicy): + The body resource for this request + This corresponds to the ``vm_extension_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [ + project, + zone, + vm_extension_policy, + vm_extension_policy_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, compute.UpdateZoneVmExtensionPolicyRequest): + request = compute.UpdateZoneVmExtensionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if vm_extension_policy is not None: + request.vm_extension_policy = vm_extension_policy + if vm_extension_policy_resource is not None: + request.vm_extension_policy_resource = vm_extension_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("vm_extension_policy", request.vm_extension_policy), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update( + self, + request: Optional[ + Union[compute.UpdateZoneVmExtensionPolicyRequest, dict] + ] = None, + *, + project: Optional[str] = None, + zone: Optional[str] = None, + vm_extension_policy: Optional[str] = None, + vm_extension_policy_resource: Optional[compute.VmExtensionPolicy] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> extended_operation.ExtendedOperation: + r"""Modifies an existing zone VM extension policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import compute_v1 + + def sample_update(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateZoneVmExtensionPolicyRequest( + project="project_value", + vm_extension_policy="vm_extension_policy_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.compute_v1.types.UpdateZoneVmExtensionPolicyRequest, dict]): + The request object. A request message for + ZoneVmExtensionPolicies.Update. See the + method description for details. + project (str): + Project ID for this request. + This corresponds to the ``project`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + zone (str): + Name of the zone for this request. 
+ This corresponds to the ``zone`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vm_extension_policy (str): + Name of the zone VM extension policy + to update. + + This corresponds to the ``vm_extension_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + vm_extension_policy_resource (google.cloud.compute_v1.types.VmExtensionPolicy): + The body resource for this request + This corresponds to the ``vm_extension_policy_resource`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.extended_operation.ExtendedOperation: + An object representing a extended + long-running operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [ + project, + zone, + vm_extension_policy, + vm_extension_policy_resource, + ] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, compute.UpdateZoneVmExtensionPolicyRequest): + request = compute.UpdateZoneVmExtensionPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if project is not None: + request.project = project + if zone is not None: + request.zone = zone + if vm_extension_policy is not None: + request.vm_extension_policy = vm_extension_policy + if vm_extension_policy_resource is not None: + request.vm_extension_policy_resource = vm_extension_policy_resource + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + ( + ("project", request.project), + ("zone", request.zone), + ("vm_extension_policy", request.vm_extension_policy), + ) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + operation_service = self._transport._zone_operations_client + operation_request = compute.GetZoneOperationRequest() + operation_request.project = request.project + operation_request.zone = request.zone + operation_request.operation = response.name + + get_operation = functools.partial(operation_service.get, operation_request) + # Cancel is not part of extended operations yet. + cancel_operation = lambda: None + + # Note: this class is an implementation detail to provide a uniform + # set of names for certain fields in the extended operation proto message. + # See google.api_core.extended_operation.ExtendedOperation for details + # on these properties and the expected interface. 
+ class _CustomOperation(extended_operation.ExtendedOperation): + @property + def error_message(self): + return self._extended_operation.http_error_message + + @property + def error_code(self): + return self._extended_operation.http_error_status_code + + response = _CustomOperation.make(get_operation, cancel_operation, response) + + # Done; return the response. + return response + + def __enter__(self) -> "ZoneVmExtensionPoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("ZoneVmExtensionPoliciesClient",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/pagers.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/pagers.py new file mode 100644 index 000000000000..1302a7ff2af2 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/pagers.py @@ -0,0 +1,117 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.compute_v1.types import compute + + +class ListPager: + """A pager for iterating through ``list`` requests. + + This class thinly wraps an initial + :class:`google.cloud.compute_v1.types.VmExtensionPolicyList` object, and + provides an ``__iter__`` method to iterate through its + ``items`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``List`` requests and continue to iterate + through the ``items`` field on the + corresponding responses. + + All the usual :class:`google.cloud.compute_v1.types.VmExtensionPolicyList` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., compute.VmExtensionPolicyList], + request: compute.ListZoneVmExtensionPoliciesRequest, + response: compute.VmExtensionPolicyList, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.compute_v1.types.ListZoneVmExtensionPoliciesRequest): + The initial request object. + response (google.cloud.compute_v1.types.VmExtensionPolicyList): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = compute.ListZoneVmExtensionPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[compute.VmExtensionPolicyList]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[compute.VmExtensionPolicy]: + for page in self.pages: + yield from page.items + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/README.rst b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/README.rst new file mode 100644 index 000000000000..86107a652935 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/README.rst @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + 
+``ZoneVmExtensionPoliciesTransport`` is the ABC for all transports. + +- public child ``ZoneVmExtensionPoliciesGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). +- public child ``ZoneVmExtensionPoliciesGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseZoneVmExtensionPoliciesRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). +- public child ``ZoneVmExtensionPoliciesRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/__init__.py new file mode 100644 index 000000000000..1b0b868b7c67 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import ZoneVmExtensionPoliciesTransport +from .rest import ( + ZoneVmExtensionPoliciesRestInterceptor, + ZoneVmExtensionPoliciesRestTransport, +) + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[ZoneVmExtensionPoliciesTransport]] +_transport_registry["rest"] = ZoneVmExtensionPoliciesRestTransport + +__all__ = ( + "ZoneVmExtensionPoliciesTransport", + "ZoneVmExtensionPoliciesRestTransport", + "ZoneVmExtensionPoliciesRestInterceptor", +) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/base.py new file mode 100644 index 000000000000..0f44b92ce36b --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/base.py @@ -0,0 +1,268 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Any, Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +import google.auth # type: ignore +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.compute_v1 import gapic_version as package_version +from google.cloud.compute_v1.services import zone_operations +from google.cloud.compute_v1.types import compute + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class ZoneVmExtensionPoliciesTransport(abc.ABC): + """Abstract transport class for ZoneVmExtensionPolicies.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "compute.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + self._extended_operations_services: Dict[str, Any] = {} + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + self._wrapped_methods: Dict[Callable, Callable] = {} + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.delete: gapic_v1.method.wrap_method( + self.delete, + default_timeout=600.0, + client_info=client_info, + ), + self.get: gapic_v1.method.wrap_method( + self.get, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.insert: gapic_v1.method.wrap_method( + self.insert, + default_timeout=600.0, + client_info=client_info, + ), + self.list: gapic_v1.method.wrap_method( + self.list, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.update: gapic_v1.method.wrap_method( + self.update, + default_timeout=600.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def delete( + self, + ) -> Callable[ + [compute.DeleteZoneVmExtensionPolicyRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def get( + self, + ) -> Callable[ + [compute.GetZoneVmExtensionPolicyRequest], + Union[compute.VmExtensionPolicy, Awaitable[compute.VmExtensionPolicy]], + ]: + raise NotImplementedError() + + @property + def insert( + self, + ) -> Callable[ + [compute.InsertZoneVmExtensionPolicyRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def list( + self, + ) -> Callable[ + [compute.ListZoneVmExtensionPoliciesRequest], + Union[compute.VmExtensionPolicyList, Awaitable[compute.VmExtensionPolicyList]], + ]: + raise NotImplementedError() + + @property + def update( + self, + ) -> Callable[ + [compute.UpdateZoneVmExtensionPolicyRequest], + Union[compute.Operation, Awaitable[compute.Operation]], + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + @property + def _zone_operations_client(self) -> zone_operations.ZoneOperationsClient: + ex_op_service = self._extended_operations_services.get("zone_operations") + if not ex_op_service: + ex_op_service = zone_operations.ZoneOperationsClient( + credentials=self._credentials, + transport=self.kind, + ) + self._extended_operations_services["zone_operations"] = ex_op_service + + return ex_op_service + + +__all__ = ("ZoneVmExtensionPoliciesTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/rest.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/rest.py new file mode 100644 index 000000000000..e6f77caca460 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/rest.py @@ -0,0 +1,1328 @@ 
+# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseZoneVmExtensionPoliciesRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + 
rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class ZoneVmExtensionPoliciesRestInterceptor: + """Interceptor for ZoneVmExtensionPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the ZoneVmExtensionPoliciesRestTransport. + + .. code-block:: python + class MyCustomZoneVmExtensionPoliciesInterceptor(ZoneVmExtensionPoliciesRestInterceptor): + def pre_delete(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_insert(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_insert(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
ZoneVmExtensionPoliciesRestTransport(interceptor=MyCustomZoneVmExtensionPoliciesInterceptor()) + client = ZoneVmExtensionPoliciesClient(transport=transport) + + + """ + + def pre_delete( + self, + request: compute.DeleteZoneVmExtensionPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.DeleteZoneVmExtensionPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneVmExtensionPolicies server. + """ + return request, metadata + + def post_delete(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for delete + + DEPRECATED. Please use the `post_delete_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ZoneVmExtensionPolicies server but before + it is returned to user code. This `post_delete` interceptor runs + before the `post_delete_with_metadata` interceptor. + """ + return response + + def post_delete_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneVmExtensionPolicies server but before it is returned to user code. + + We recommend only using this `post_delete_with_metadata` + interceptor in new development instead of the `post_delete` interceptor. + When both interceptors are used, this `post_delete_with_metadata` interceptor runs after the + `post_delete` interceptor. The (possibly modified) response returned by + `post_delete` will be passed to + `post_delete_with_metadata`. 
+ """ + return response, metadata + + def pre_get( + self, + request: compute.GetZoneVmExtensionPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.GetZoneVmExtensionPolicyRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneVmExtensionPolicies server. + """ + return request, metadata + + def post_get( + self, response: compute.VmExtensionPolicy + ) -> compute.VmExtensionPolicy: + """Post-rpc interceptor for get + + DEPRECATED. Please use the `post_get_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ZoneVmExtensionPolicies server but before + it is returned to user code. This `post_get` interceptor runs + before the `post_get_with_metadata` interceptor. + """ + return response + + def post_get_with_metadata( + self, + response: compute.VmExtensionPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.VmExtensionPolicy, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneVmExtensionPolicies server but before it is returned to user code. + + We recommend only using this `post_get_with_metadata` + interceptor in new development instead of the `post_get` interceptor. + When both interceptors are used, this `post_get_with_metadata` interceptor runs after the + `post_get` interceptor. The (possibly modified) response returned by + `post_get` will be passed to + `post_get_with_metadata`. 
+ """ + return response, metadata + + def pre_insert( + self, + request: compute.InsertZoneVmExtensionPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.InsertZoneVmExtensionPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for insert + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneVmExtensionPolicies server. + """ + return request, metadata + + def post_insert(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for insert + + DEPRECATED. Please use the `post_insert_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ZoneVmExtensionPolicies server but before + it is returned to user code. This `post_insert` interceptor runs + before the `post_insert_with_metadata` interceptor. + """ + return response + + def post_insert_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for insert + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneVmExtensionPolicies server but before it is returned to user code. + + We recommend only using this `post_insert_with_metadata` + interceptor in new development instead of the `post_insert` interceptor. + When both interceptors are used, this `post_insert_with_metadata` interceptor runs after the + `post_insert` interceptor. The (possibly modified) response returned by + `post_insert` will be passed to + `post_insert_with_metadata`. 
+ """ + return response, metadata + + def pre_list( + self, + request: compute.ListZoneVmExtensionPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.ListZoneVmExtensionPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneVmExtensionPolicies server. + """ + return request, metadata + + def post_list( + self, response: compute.VmExtensionPolicyList + ) -> compute.VmExtensionPolicyList: + """Post-rpc interceptor for list + + DEPRECATED. Please use the `post_list_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ZoneVmExtensionPolicies server but before + it is returned to user code. This `post_list` interceptor runs + before the `post_list_with_metadata` interceptor. + """ + return response + + def post_list_with_metadata( + self, + response: compute.VmExtensionPolicyList, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.VmExtensionPolicyList, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneVmExtensionPolicies server but before it is returned to user code. + + We recommend only using this `post_list_with_metadata` + interceptor in new development instead of the `post_list` interceptor. + When both interceptors are used, this `post_list_with_metadata` interceptor runs after the + `post_list` interceptor. The (possibly modified) response returned by + `post_list` will be passed to + `post_list_with_metadata`. 
+ """ + return response, metadata + + def pre_update( + self, + request: compute.UpdateZoneVmExtensionPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + compute.UpdateZoneVmExtensionPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update + + Override in a subclass to manipulate the request or metadata + before they are sent to the ZoneVmExtensionPolicies server. + """ + return request, metadata + + def post_update(self, response: compute.Operation) -> compute.Operation: + """Post-rpc interceptor for update + + DEPRECATED. Please use the `post_update_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ZoneVmExtensionPolicies server but before + it is returned to user code. This `post_update` interceptor runs + before the `post_update_with_metadata` interceptor. + """ + return response + + def post_update_with_metadata( + self, + response: compute.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[compute.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ZoneVmExtensionPolicies server but before it is returned to user code. + + We recommend only using this `post_update_with_metadata` + interceptor in new development instead of the `post_update` interceptor. + When both interceptors are used, this `post_update_with_metadata` interceptor runs after the + `post_update` interceptor. The (possibly modified) response returned by + `post_update` will be passed to + `post_update_with_metadata`. 
+ """ + return response, metadata + + +@dataclasses.dataclass +class ZoneVmExtensionPoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: ZoneVmExtensionPoliciesRestInterceptor + + +class ZoneVmExtensionPoliciesRestTransport(_BaseZoneVmExtensionPoliciesRestTransport): + """REST backend synchronous transport for ZoneVmExtensionPolicies. + + The ZoneVmExtensionPolicies API. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[ZoneVmExtensionPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + NOTE: This REST transport functionality is currently in a beta + state (preview). We welcome your feedback via a GitHub issue in + this library's repository. Thank you! + + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+ This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[ZoneVmExtensionPoliciesRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or ZoneVmExtensionPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _Delete( + _BaseZoneVmExtensionPoliciesRestTransport._BaseDelete, + ZoneVmExtensionPoliciesRestStub, + ): + def __hash__(self): + return hash("ZoneVmExtensionPoliciesRestTransport.Delete") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.DeleteZoneVmExtensionPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the delete method over HTTP. + + Args: + request (~.compute.DeleteZoneVmExtensionPolicyRequest): + The request object. A request message for + ZoneVmExtensionPolicies.Delete. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. + + """ + + http_options = _BaseZoneVmExtensionPoliciesRestTransport._BaseDelete._get_http_options() + + request, metadata = self._interceptor.pre_delete(request, metadata) + transcoded_request = _BaseZoneVmExtensionPoliciesRestTransport._BaseDelete._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseZoneVmExtensionPoliciesRestTransport._BaseDelete._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.Delete", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "Delete", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ZoneVmExtensionPoliciesRestTransport._Delete._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.delete", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "Delete", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Get( + _BaseZoneVmExtensionPoliciesRestTransport._BaseGet, + ZoneVmExtensionPoliciesRestStub, + ): + def __hash__(self): + return hash("ZoneVmExtensionPoliciesRestTransport.Get") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method 
= transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.GetZoneVmExtensionPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.VmExtensionPolicy: + r"""Call the get method over HTTP. + + Args: + request (~.compute.GetZoneVmExtensionPolicyRequest): + The request object. A request message for + ZoneVmExtensionPolicies.Get. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.VmExtensionPolicy: + Represents a VM extension policy. 
+ """ + + http_options = ( + _BaseZoneVmExtensionPoliciesRestTransport._BaseGet._get_http_options() + ) + + request, metadata = self._interceptor.pre_get(request, metadata) + transcoded_request = _BaseZoneVmExtensionPoliciesRestTransport._BaseGet._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseZoneVmExtensionPoliciesRestTransport._BaseGet._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.Get", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "Get", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ZoneVmExtensionPoliciesRestTransport._Get._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VmExtensionPolicy() + pb_resp = compute.VmExtensionPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.VmExtensionPolicy.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.get", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "Get", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Insert( + _BaseZoneVmExtensionPoliciesRestTransport._BaseInsert, + ZoneVmExtensionPoliciesRestStub, + ): + def __hash__(self): + return hash("ZoneVmExtensionPoliciesRestTransport.Insert") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.InsertZoneVmExtensionPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the insert method over HTTP. + + Args: + request (~.compute.InsertZoneVmExtensionPolicyRequest): + The request object. A request message for + ZoneVmExtensionPolicies.Insert. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = _BaseZoneVmExtensionPoliciesRestTransport._BaseInsert._get_http_options() + + request, metadata = self._interceptor.pre_insert(request, metadata) + transcoded_request = _BaseZoneVmExtensionPoliciesRestTransport._BaseInsert._get_transcoded_request( + http_options, request + ) + + body = _BaseZoneVmExtensionPoliciesRestTransport._BaseInsert._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseZoneVmExtensionPoliciesRestTransport._BaseInsert._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.Insert", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "Insert", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ZoneVmExtensionPoliciesRestTransport._Insert._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_insert(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_insert_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.insert", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "Insert", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _List( + _BaseZoneVmExtensionPoliciesRestTransport._BaseList, + ZoneVmExtensionPoliciesRestStub, + ): + def __hash__(self): + return hash("ZoneVmExtensionPoliciesRestTransport.List") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: compute.ListZoneVmExtensionPoliciesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), + ) -> compute.VmExtensionPolicyList: + r"""Call the list method over HTTP. + + Args: + request (~.compute.ListZoneVmExtensionPoliciesRequest): + The request object. A request message for + ZoneVmExtensionPolicies.List. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.VmExtensionPolicyList: + + """ + + http_options = ( + _BaseZoneVmExtensionPoliciesRestTransport._BaseList._get_http_options() + ) + + request, metadata = self._interceptor.pre_list(request, metadata) + transcoded_request = _BaseZoneVmExtensionPoliciesRestTransport._BaseList._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseZoneVmExtensionPoliciesRestTransport._BaseList._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.List", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "List", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
ZoneVmExtensionPoliciesRestTransport._List._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.VmExtensionPolicyList() + pb_resp = compute.VmExtensionPolicyList.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_with_metadata(resp, response_metadata) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.VmExtensionPolicyList.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.list", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "List", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _Update( + _BaseZoneVmExtensionPoliciesRestTransport._BaseUpdate, + ZoneVmExtensionPoliciesRestStub, + ): + def __hash__(self): + return hash("ZoneVmExtensionPoliciesRestTransport.Update") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: compute.UpdateZoneVmExtensionPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> compute.Operation: + r"""Call the update method over HTTP. + + Args: + request (~.compute.UpdateZoneVmExtensionPolicyRequest): + The request object. A request message for + ZoneVmExtensionPolicies.Update. See the + method description for details. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.compute.Operation: + Represents an Operation resource. + + Google Compute Engine has three Operation resources: + + - `Global `__ + - `Regional `__ + - `Zonal `__ + + You can use an operation resource to manage asynchronous + API requests. For more information, readHandling API + responses. + + Operations can be global, regional or zonal. + + :: + + - For global operations, use the `globalOperations` + resource. + - For regional operations, use the + `regionOperations` resource. + - For zonal operations, use + the `zoneOperations` resource. + + For more information, read Global, Regional, and Zonal + Resources. + + Note that completed Operation resources have a limited + retention period. 
+ + """ + + http_options = _BaseZoneVmExtensionPoliciesRestTransport._BaseUpdate._get_http_options() + + request, metadata = self._interceptor.pre_update(request, metadata) + transcoded_request = _BaseZoneVmExtensionPoliciesRestTransport._BaseUpdate._get_transcoded_request( + http_options, request + ) + + body = _BaseZoneVmExtensionPoliciesRestTransport._BaseUpdate._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseZoneVmExtensionPoliciesRestTransport._BaseUpdate._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.Update", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "Update", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ZoneVmExtensionPoliciesRestTransport._Update._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = compute.Operation() + pb_resp = compute.Operation.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = compute.Operation.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.update", + extra={ + "serviceName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "rpcName": "Update", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def delete( + self, + ) -> Callable[[compute.DeleteZoneVmExtensionPolicyRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Delete(self._session, self._host, self._interceptor) # type: ignore + + @property + def get( + self, + ) -> Callable[[compute.GetZoneVmExtensionPolicyRequest], compute.VmExtensionPolicy]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Get(self._session, self._host, self._interceptor) # type: ignore + + @property + def insert( + self, + ) -> Callable[[compute.InsertZoneVmExtensionPolicyRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._Insert(self._session, self._host, self._interceptor) # type: ignore + + @property + def list( + self, + ) -> Callable[ + [compute.ListZoneVmExtensionPoliciesRequest], compute.VmExtensionPolicyList + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._List(self._session, self._host, self._interceptor) # type: ignore + + @property + def update( + self, + ) -> Callable[[compute.UpdateZoneVmExtensionPolicyRequest], compute.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._Update(self._session, self._host, self._interceptor) # type: ignore + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("ZoneVmExtensionPoliciesRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/rest_base.py b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/rest_base.py new file mode 100644 index 000000000000..921cff46f224 --- /dev/null +++ b/packages/google-cloud-compute/google/cloud/compute_v1/services/zone_vm_extension_policies/transports/rest_base.py @@ -0,0 +1,341 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.protobuf import json_format + +from google.cloud.compute_v1.types import compute + +from .base import DEFAULT_CLIENT_INFO, ZoneVmExtensionPoliciesTransport + + +class _BaseZoneVmExtensionPoliciesRestTransport(ZoneVmExtensionPoliciesTransport): + """Base REST backend transport for ZoneVmExtensionPolicies. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "compute.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'compute.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseDelete: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies/{vm_extension_policy}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.DeleteZoneVmExtensionPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseZoneVmExtensionPoliciesRestTransport._BaseDelete._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseGet: + def 
__hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies/{vm_extension_policy}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.GetZoneVmExtensionPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseZoneVmExtensionPoliciesRestTransport._BaseGet._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseInsert: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies", + "body": "vm_extension_policy_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.InsertZoneVmExtensionPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, 
pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseZoneVmExtensionPoliciesRestTransport._BaseInsert._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseList: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.ListZoneVmExtensionPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseZoneVmExtensionPoliciesRestTransport._BaseList._get_unset_required_fields( + query_params + ) + ) + + return query_params + + class _BaseUpdate: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies/{vm_extension_policy}", + "body": "vm_extension_policy_resource", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = compute.UpdateZoneVmExtensionPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=False + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=False, + ) + ) + query_params.update( + _BaseZoneVmExtensionPoliciesRestTransport._BaseUpdate._get_unset_required_fields( + query_params + ) + ) + + return query_params + + +__all__ = ("_BaseZoneVmExtensionPoliciesRestTransport",) diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py index 6bb502bf0977..fe4069fed7ed 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/__init__.py @@ -57,6 +57,7 @@ AggregatedListAcceleratorTypesRequest, AggregatedListAddressesRequest, AggregatedListAutoscalersRequest, + AggregatedListBackendBucketsRequest, AggregatedListBackendServicesRequest, AggregatedListDisksRequest, AggregatedListDiskTypesRequest, @@ -144,12 +145,15 @@ Backend, BackendBackendOrchestrationInfo, 
BackendBucket, + BackendBucketAggregatedList, BackendBucketCdnPolicy, BackendBucketCdnPolicyBypassCacheOnRequestHeader, BackendBucketCdnPolicyCacheKeyPolicy, BackendBucketCdnPolicyNegativeCachingPolicy, BackendBucketList, + BackendBucketListUsable, BackendBucketParams, + BackendBucketsScopedList, BackendBucketUsedBy, BackendCustomMetric, BackendService, @@ -202,12 +206,16 @@ BundledLocalSsds, CacheInvalidationRule, CacheKeyPolicy, + CachePolicy, + CachePolicyCacheKeyPolicy, + CachePolicyNegativeCachingPolicy, CalendarModeAdviceRequest, CalendarModeAdviceResponse, CalendarModeAdviceRpcRequest, CalendarModeRecommendation, CancelFutureReservationRequest, CancelInstanceGroupManagerResizeRequestRequest, + CancelRegionInstanceGroupManagerResizeRequestRequest, CircuitBreakers, CloneRulesFirewallPolicyRequest, CloneRulesNetworkFirewallPolicyRequest, @@ -215,11 +223,14 @@ Commitment, CommitmentAggregatedList, CommitmentList, + CommitmentParams, CommitmentResourceStatus, CommitmentsScopedList, CompositeHealthCheck, CompositeHealthCheckAggregatedList, + CompositeHealthCheckHealth, CompositeHealthCheckList, + CompositeHealthChecksGetHealthResponseHealthSourceHealth, CompositeHealthChecksScopedList, ConfidentialInstanceConfig, ConnectionDraining, @@ -269,6 +280,7 @@ DeleteInstancesInstanceGroupManagerRequest, DeleteInstancesRegionInstanceGroupManagerRequest, DeleteInstanceTemplateRequest, + DeleteInstantSnapshotGroupRequest, DeleteInstantSnapshotRequest, DeleteInterconnectAttachmentGroupRequest, DeleteInterconnectAttachmentRequest, @@ -292,6 +304,7 @@ DeletePublicAdvertisedPrefixeRequest, DeletePublicDelegatedPrefixeRequest, DeleteRegionAutoscalerRequest, + DeleteRegionBackendBucketRequest, DeleteRegionBackendServiceRequest, DeleteRegionCompositeHealthCheckRequest, DeleteRegionDiskRequest, @@ -300,7 +313,9 @@ DeleteRegionHealthCheckServiceRequest, DeleteRegionHealthSourceRequest, DeleteRegionInstanceGroupManagerRequest, + DeleteRegionInstanceGroupManagerResizeRequestRequest, 
DeleteRegionInstanceTemplateRequest, + DeleteRegionInstantSnapshotGroupRequest, DeleteRegionInstantSnapshotRequest, DeleteRegionNetworkEndpointGroupRequest, DeleteRegionNetworkFirewallPolicyRequest, @@ -308,6 +323,7 @@ DeleteRegionOperationRequest, DeleteRegionOperationResponse, DeleteRegionSecurityPolicyRequest, + DeleteRegionSnapshotRequest, DeleteRegionSslCertificateRequest, DeleteRegionSslPolicyRequest, DeleteRegionTargetHttpProxyRequest, @@ -342,6 +358,7 @@ DeleteWireGroupRequest, DeleteZoneOperationRequest, DeleteZoneOperationResponse, + DeleteZoneVmExtensionPolicyRequest, Denied, DeprecateImageRequest, DeprecationStatus, @@ -371,6 +388,7 @@ DiskTypeAggregatedList, DiskTypeList, DiskTypesScopedList, + DiskUpdateKmsKeyRequest, DisplayDevice, DistributionPolicy, DistributionPolicyZoneConfiguration, @@ -413,6 +431,7 @@ ForwardingRulesScopedList, FutureReservation, FutureReservationCommitmentInfo, + FutureReservationParams, FutureReservationsAggregatedListResponse, FutureReservationsListResponse, FutureReservationSpecificSKUProperties, @@ -466,6 +485,8 @@ GetHealthBackendServiceRequest, GetHealthCheckRequest, GetHealthRegionBackendServiceRequest, + GetHealthRegionCompositeHealthCheckRequest, + GetHealthRegionHealthSourceRequest, GetHealthTargetPoolRequest, GetIamPolicyBackendBucketRequest, GetIamPolicyBackendServiceRequest, @@ -474,6 +495,7 @@ GetIamPolicyImageRequest, GetIamPolicyInstanceRequest, GetIamPolicyInstanceTemplateRequest, + GetIamPolicyInstantSnapshotGroupRequest, GetIamPolicyInstantSnapshotRequest, GetIamPolicyInterconnectAttachmentGroupRequest, GetIamPolicyInterconnectGroupRequest, @@ -483,10 +505,13 @@ GetIamPolicyNetworkFirewallPolicyRequest, GetIamPolicyNodeGroupRequest, GetIamPolicyNodeTemplateRequest, + GetIamPolicyRegionBackendBucketRequest, GetIamPolicyRegionBackendServiceRequest, GetIamPolicyRegionDiskRequest, + GetIamPolicyRegionInstantSnapshotGroupRequest, GetIamPolicyRegionInstantSnapshotRequest, 
GetIamPolicyRegionNetworkFirewallPolicyRequest, + GetIamPolicyRegionSnapshotRequest, GetIamPolicyReservationBlockRequest, GetIamPolicyReservationRequest, GetIamPolicyReservationSubBlockRequest, @@ -503,6 +528,7 @@ GetInstanceRequest, GetInstanceSettingRequest, GetInstanceTemplateRequest, + GetInstantSnapshotGroupRequest, GetInstantSnapshotRequest, GetInterconnectAttachmentGroupRequest, GetInterconnectAttachmentRequest, @@ -536,6 +562,7 @@ GetPublicAdvertisedPrefixeRequest, GetPublicDelegatedPrefixeRequest, GetRegionAutoscalerRequest, + GetRegionBackendBucketRequest, GetRegionBackendServiceRequest, GetRegionCommitmentRequest, GetRegionCompositeHealthCheckRequest, @@ -546,8 +573,10 @@ GetRegionHealthCheckServiceRequest, GetRegionHealthSourceRequest, GetRegionInstanceGroupManagerRequest, + GetRegionInstanceGroupManagerResizeRequestRequest, GetRegionInstanceGroupRequest, GetRegionInstanceTemplateRequest, + GetRegionInstantSnapshotGroupRequest, GetRegionInstantSnapshotRequest, GetRegionNetworkEndpointGroupRequest, GetRegionNetworkFirewallPolicyRequest, @@ -555,6 +584,8 @@ GetRegionOperationRequest, GetRegionRequest, GetRegionSecurityPolicyRequest, + GetRegionSnapshotRequest, + GetRegionSnapshotSettingRequest, GetRegionSslCertificateRequest, GetRegionSslPolicyRequest, GetRegionTargetHttpProxyRequest, @@ -609,6 +640,7 @@ GetXpnResourcesProjectsRequest, GetZoneOperationRequest, GetZoneRequest, + GetZoneVmExtensionPolicyRequest, GlobalAddressesMoveRequest, GlobalNetworkEndpointGroupsAttachEndpointsRequest, GlobalNetworkEndpointGroupsDetachEndpointsRequest, @@ -639,7 +671,10 @@ HealthChecksScopedList, HealthSource, HealthSourceAggregatedList, + HealthSourceHealth, HealthSourceList, + HealthSourcesGetHealthResponseSourceInfo, + HealthSourcesGetHealthResponseSourceInfoBackendInfo, HealthSourcesScopedList, HealthStatus, HealthStatusForNetworkEndpoint, @@ -688,6 +723,7 @@ InsertInstanceGroupRequest, InsertInstanceRequest, InsertInstanceTemplateRequest, + 
InsertInstantSnapshotGroupRequest, InsertInstantSnapshotRequest, InsertInterconnectAttachmentGroupRequest, InsertInterconnectAttachmentRequest, @@ -707,6 +743,7 @@ InsertPublicAdvertisedPrefixeRequest, InsertPublicDelegatedPrefixeRequest, InsertRegionAutoscalerRequest, + InsertRegionBackendBucketRequest, InsertRegionBackendServiceRequest, InsertRegionCommitmentRequest, InsertRegionCompositeHealthCheckRequest, @@ -716,12 +753,15 @@ InsertRegionHealthCheckServiceRequest, InsertRegionHealthSourceRequest, InsertRegionInstanceGroupManagerRequest, + InsertRegionInstanceGroupManagerResizeRequestRequest, InsertRegionInstanceTemplateRequest, + InsertRegionInstantSnapshotGroupRequest, InsertRegionInstantSnapshotRequest, InsertRegionNetworkEndpointGroupRequest, InsertRegionNetworkFirewallPolicyRequest, InsertRegionNotificationEndpointRequest, InsertRegionSecurityPolicyRequest, + InsertRegionSnapshotRequest, InsertRegionSslCertificateRequest, InsertRegionSslPolicyRequest, InsertRegionTargetHttpProxyRequest, @@ -751,6 +791,7 @@ InsertVpnGatewayRequest, InsertVpnTunnelRequest, InsertWireGroupRequest, + InsertZoneVmExtensionPolicyRequest, Instance, InstanceAggregatedList, InstanceConsumptionData, @@ -798,6 +839,7 @@ InstanceGroupManagerStatusAllInstancesConfig, InstanceGroupManagerStatusBulkInstanceOperation, InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck, + InstanceGroupManagerStatusInstanceStatusSummary, InstanceGroupManagerStatusStateful, InstanceGroupManagerStatusStatefulPerInstanceConfigs, InstanceGroupManagerStatusVersionTarget, @@ -846,6 +888,10 @@ InstanceWithNamedPorts, InstantSnapshot, InstantSnapshotAggregatedList, + InstantSnapshotGroup, + InstantSnapshotGroupParameters, + InstantSnapshotGroupResourceStatus, + InstantSnapshotGroupSourceInfo, InstantSnapshotList, InstantSnapshotParams, InstantSnapshotResourceStatus, @@ -971,6 +1017,8 @@ ListInstancesRegionInstanceGroupsRequest, ListInstancesRequest, ListInstanceTemplatesRequest, + 
ListInstantSnapshotGroups, + ListInstantSnapshotGroupsRequest, ListInstantSnapshotsRequest, ListInterconnectAttachmentGroupsRequest, ListInterconnectAttachmentsRequest, @@ -1007,6 +1055,7 @@ ListPublicDelegatedPrefixesRequest, ListReferrersInstancesRequest, ListRegionAutoscalersRequest, + ListRegionBackendBucketsRequest, ListRegionBackendServicesRequest, ListRegionCommitmentsRequest, ListRegionCompositeHealthChecksRequest, @@ -1016,15 +1065,18 @@ ListRegionHealthCheckServicesRequest, ListRegionHealthChecksRequest, ListRegionHealthSourcesRequest, + ListRegionInstanceGroupManagerResizeRequestsRequest, ListRegionInstanceGroupManagersRequest, ListRegionInstanceGroupsRequest, ListRegionInstanceTemplatesRequest, + ListRegionInstantSnapshotGroupsRequest, ListRegionInstantSnapshotsRequest, ListRegionNetworkEndpointGroupsRequest, ListRegionNetworkFirewallPoliciesRequest, ListRegionNotificationEndpointsRequest, ListRegionOperationsRequest, ListRegionSecurityPoliciesRequest, + ListRegionSnapshotsRequest, ListRegionsRequest, ListRegionSslCertificatesRequest, ListRegionSslPoliciesRequest, @@ -1058,7 +1110,9 @@ ListTargetTcpProxiesRequest, ListTargetVpnGatewaysRequest, ListUrlMapsRequest, + ListUsableBackendBucketsRequest, ListUsableBackendServicesRequest, + ListUsableRegionBackendBucketsRequest, ListUsableRegionBackendServicesRequest, ListUsableSubnetworksRequest, ListVpnGatewaysRequest, @@ -1067,6 +1121,7 @@ ListXpnHostsProjectsRequest, ListZoneOperationsRequest, ListZonesRequest, + ListZoneVmExtensionPoliciesRequest, LocalDisk, LocalizedMessage, LocationPolicy, @@ -1218,6 +1273,7 @@ PatchPublicAdvertisedPrefixeRequest, PatchPublicDelegatedPrefixeRequest, PatchRegionAutoscalerRequest, + PatchRegionBackendBucketRequest, PatchRegionBackendServiceRequest, PatchRegionCompositeHealthCheckRequest, PatchRegionHealthAggregationPolicyRequest, @@ -1227,6 +1283,7 @@ PatchRegionInstanceGroupManagerRequest, PatchRegionNetworkFirewallPolicyRequest, PatchRegionSecurityPolicyRequest, + 
PatchRegionSnapshotSettingRequest, PatchRegionSslPolicyRequest, PatchRegionTargetHttpsProxyRequest, PatchRegionUrlMapRequest, @@ -1300,10 +1357,12 @@ RegionDisksResizeRequest, RegionDisksStartAsyncReplicationRequest, RegionDiskTypeList, + RegionDiskUpdateKmsKeyRequest, RegionInstanceGroupList, RegionInstanceGroupManagerDeleteInstanceConfigReq, RegionInstanceGroupManagerList, RegionInstanceGroupManagerPatchInstanceConfigReq, + RegionInstanceGroupManagerResizeRequestsListResponse, RegionInstanceGroupManagersAbandonInstancesRequest, RegionInstanceGroupManagersApplyUpdatesRequest, RegionInstanceGroupManagersCreateInstancesRequest, @@ -1329,6 +1388,7 @@ RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy, RegionSetLabelsRequest, RegionSetPolicyRequest, + RegionSnapshotUpdateKmsKeyRequest, RegionTargetHttpsProxiesSetSslCertificatesRequest, RegionUrlMapsValidateRequest, RemoveAssociationFirewallPolicyRequest, @@ -1525,6 +1585,7 @@ SetIamPolicyImageRequest, SetIamPolicyInstanceRequest, SetIamPolicyInstanceTemplateRequest, + SetIamPolicyInstantSnapshotGroupRequest, SetIamPolicyInstantSnapshotRequest, SetIamPolicyInterconnectAttachmentGroupRequest, SetIamPolicyInterconnectGroupRequest, @@ -1534,10 +1595,13 @@ SetIamPolicyNetworkFirewallPolicyRequest, SetIamPolicyNodeGroupRequest, SetIamPolicyNodeTemplateRequest, + SetIamPolicyRegionBackendBucketRequest, SetIamPolicyRegionBackendServiceRequest, SetIamPolicyRegionDiskRequest, + SetIamPolicyRegionInstantSnapshotGroupRequest, SetIamPolicyRegionInstantSnapshotRequest, SetIamPolicyRegionNetworkFirewallPolicyRequest, + SetIamPolicyRegionSnapshotRequest, SetIamPolicyReservationBlockRequest, SetIamPolicyReservationRequest, SetIamPolicyReservationSubBlockRequest, @@ -1563,6 +1627,7 @@ SetLabelsRegionDiskRequest, SetLabelsRegionInstantSnapshotRequest, SetLabelsRegionSecurityPolicyRequest, + SetLabelsRegionSnapshotRequest, SetLabelsSecurityPolicyRequest, SetLabelsSnapshotRequest, 
SetLabelsTargetVpnGatewayRequest, @@ -1613,11 +1678,15 @@ SimulateMaintenanceEventInstanceRequest, SimulateMaintenanceEventNodeGroupRequest, Snapshot, + SnapshotGroupParameters, SnapshotList, SnapshotParams, SnapshotSettings, + SnapshotSettingsAccessLocation, + SnapshotSettingsAccessLocationAccessLocationPreference, SnapshotSettingsStorageLocationSettings, SnapshotSettingsStorageLocationSettingsStorageLocationPreference, + SnapshotUpdateKmsKeyRequest, SourceDiskEncryptionKey, SourceInstanceParams, SourceInstanceProperties, @@ -1744,6 +1813,7 @@ TestIamPermissionsInstanceGroupRequest, TestIamPermissionsInstanceRequest, TestIamPermissionsInstanceTemplateRequest, + TestIamPermissionsInstantSnapshotGroupRequest, TestIamPermissionsInstantSnapshotRequest, TestIamPermissionsInterconnectAttachmentGroupRequest, TestIamPermissionsInterconnectGroupRequest, @@ -1757,6 +1827,7 @@ TestIamPermissionsNodeTemplateRequest, TestIamPermissionsPacketMirroringRequest, TestIamPermissionsRegionAutoscalerRequest, + TestIamPermissionsRegionBackendBucketRequest, TestIamPermissionsRegionBackendServiceRequest, TestIamPermissionsRegionCompositeHealthCheckRequest, TestIamPermissionsRegionDiskRequest, @@ -1765,9 +1836,11 @@ TestIamPermissionsRegionHealthCheckServiceRequest, TestIamPermissionsRegionHealthSourceRequest, TestIamPermissionsRegionInstanceGroupRequest, + TestIamPermissionsRegionInstantSnapshotGroupRequest, TestIamPermissionsRegionInstantSnapshotRequest, TestIamPermissionsRegionNetworkFirewallPolicyRequest, TestIamPermissionsRegionNotificationEndpointRequest, + TestIamPermissionsRegionSnapshotRequest, TestIamPermissionsReservationBlockRequest, TestIamPermissionsReservationRequest, TestIamPermissionsReservationSubBlockRequest, @@ -1797,6 +1870,10 @@ UpdateFutureReservationRequest, UpdateHealthCheckRequest, UpdateInstanceRequest, + UpdateKmsKeyDiskRequest, + UpdateKmsKeyRegionDiskRequest, + UpdateKmsKeyRegionSnapshotRequest, + UpdateKmsKeySnapshotRequest, UpdateLicenseRequest, 
UpdateNetworkInterfaceInstanceRequest, UpdatePeeringNetworkRequest, @@ -1816,6 +1893,7 @@ UpdateShieldedInstanceConfigInstanceRequest, UpdateStoragePoolRequest, UpdateUrlMapRequest, + UpdateZoneVmExtensionPolicyRequest, UrlMap, UrlMapList, UrlMapReference, @@ -1837,6 +1915,11 @@ VmEndpointNatMappingsInterfaceNatMappings, VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings, VmEndpointNatMappingsList, + VmExtensionPolicy, + VmExtensionPolicyExtensionPolicy, + VmExtensionPolicyInstanceSelector, + VmExtensionPolicyLabelSelector, + VmExtensionPolicyList, VpnGateway, VpnGatewayAggregatedList, VpnGatewayList, @@ -1929,6 +2012,7 @@ "AggregatedListAcceleratorTypesRequest", "AggregatedListAddressesRequest", "AggregatedListAutoscalersRequest", + "AggregatedListBackendBucketsRequest", "AggregatedListBackendServicesRequest", "AggregatedListDisksRequest", "AggregatedListDiskTypesRequest", @@ -2016,12 +2100,15 @@ "Backend", "BackendBackendOrchestrationInfo", "BackendBucket", + "BackendBucketAggregatedList", "BackendBucketCdnPolicy", "BackendBucketCdnPolicyBypassCacheOnRequestHeader", "BackendBucketCdnPolicyCacheKeyPolicy", "BackendBucketCdnPolicyNegativeCachingPolicy", "BackendBucketList", + "BackendBucketListUsable", "BackendBucketParams", + "BackendBucketsScopedList", "BackendBucketUsedBy", "BackendCustomMetric", "BackendService", @@ -2074,12 +2161,16 @@ "BundledLocalSsds", "CacheInvalidationRule", "CacheKeyPolicy", + "CachePolicy", + "CachePolicyCacheKeyPolicy", + "CachePolicyNegativeCachingPolicy", "CalendarModeAdviceRequest", "CalendarModeAdviceResponse", "CalendarModeAdviceRpcRequest", "CalendarModeRecommendation", "CancelFutureReservationRequest", "CancelInstanceGroupManagerResizeRequestRequest", + "CancelRegionInstanceGroupManagerResizeRequestRequest", "CircuitBreakers", "CloneRulesFirewallPolicyRequest", "CloneRulesNetworkFirewallPolicyRequest", @@ -2087,11 +2178,14 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentParams", 
"CommitmentResourceStatus", "CommitmentsScopedList", "CompositeHealthCheck", "CompositeHealthCheckAggregatedList", + "CompositeHealthCheckHealth", "CompositeHealthCheckList", + "CompositeHealthChecksGetHealthResponseHealthSourceHealth", "CompositeHealthChecksScopedList", "ConfidentialInstanceConfig", "ConnectionDraining", @@ -2141,6 +2235,7 @@ "DeleteInstancesInstanceGroupManagerRequest", "DeleteInstancesRegionInstanceGroupManagerRequest", "DeleteInstanceTemplateRequest", + "DeleteInstantSnapshotGroupRequest", "DeleteInstantSnapshotRequest", "DeleteInterconnectAttachmentGroupRequest", "DeleteInterconnectAttachmentRequest", @@ -2164,6 +2259,7 @@ "DeletePublicAdvertisedPrefixeRequest", "DeletePublicDelegatedPrefixeRequest", "DeleteRegionAutoscalerRequest", + "DeleteRegionBackendBucketRequest", "DeleteRegionBackendServiceRequest", "DeleteRegionCompositeHealthCheckRequest", "DeleteRegionDiskRequest", @@ -2172,7 +2268,9 @@ "DeleteRegionHealthCheckServiceRequest", "DeleteRegionHealthSourceRequest", "DeleteRegionInstanceGroupManagerRequest", + "DeleteRegionInstanceGroupManagerResizeRequestRequest", "DeleteRegionInstanceTemplateRequest", + "DeleteRegionInstantSnapshotGroupRequest", "DeleteRegionInstantSnapshotRequest", "DeleteRegionNetworkEndpointGroupRequest", "DeleteRegionNetworkFirewallPolicyRequest", @@ -2180,6 +2278,7 @@ "DeleteRegionOperationRequest", "DeleteRegionOperationResponse", "DeleteRegionSecurityPolicyRequest", + "DeleteRegionSnapshotRequest", "DeleteRegionSslCertificateRequest", "DeleteRegionSslPolicyRequest", "DeleteRegionTargetHttpProxyRequest", @@ -2214,6 +2313,7 @@ "DeleteWireGroupRequest", "DeleteZoneOperationRequest", "DeleteZoneOperationResponse", + "DeleteZoneVmExtensionPolicyRequest", "Denied", "DeprecateImageRequest", "DeprecationStatus", @@ -2243,6 +2343,7 @@ "DiskTypeAggregatedList", "DiskTypeList", "DiskTypesScopedList", + "DiskUpdateKmsKeyRequest", "DisplayDevice", "DistributionPolicy", "DistributionPolicyZoneConfiguration", @@ -2285,6 +2386,7 
@@ "ForwardingRulesScopedList", "FutureReservation", "FutureReservationCommitmentInfo", + "FutureReservationParams", "FutureReservationsAggregatedListResponse", "FutureReservationsListResponse", "FutureReservationSpecificSKUProperties", @@ -2338,6 +2440,8 @@ "GetHealthBackendServiceRequest", "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", + "GetHealthRegionCompositeHealthCheckRequest", + "GetHealthRegionHealthSourceRequest", "GetHealthTargetPoolRequest", "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", @@ -2346,6 +2450,7 @@ "GetIamPolicyImageRequest", "GetIamPolicyInstanceRequest", "GetIamPolicyInstanceTemplateRequest", + "GetIamPolicyInstantSnapshotGroupRequest", "GetIamPolicyInstantSnapshotRequest", "GetIamPolicyInterconnectAttachmentGroupRequest", "GetIamPolicyInterconnectGroupRequest", @@ -2355,10 +2460,13 @@ "GetIamPolicyNetworkFirewallPolicyRequest", "GetIamPolicyNodeGroupRequest", "GetIamPolicyNodeTemplateRequest", + "GetIamPolicyRegionBackendBucketRequest", "GetIamPolicyRegionBackendServiceRequest", "GetIamPolicyRegionDiskRequest", + "GetIamPolicyRegionInstantSnapshotGroupRequest", "GetIamPolicyRegionInstantSnapshotRequest", "GetIamPolicyRegionNetworkFirewallPolicyRequest", + "GetIamPolicyRegionSnapshotRequest", "GetIamPolicyReservationBlockRequest", "GetIamPolicyReservationRequest", "GetIamPolicyReservationSubBlockRequest", @@ -2375,6 +2483,7 @@ "GetInstanceRequest", "GetInstanceSettingRequest", "GetInstanceTemplateRequest", + "GetInstantSnapshotGroupRequest", "GetInstantSnapshotRequest", "GetInterconnectAttachmentGroupRequest", "GetInterconnectAttachmentRequest", @@ -2408,6 +2517,7 @@ "GetPublicAdvertisedPrefixeRequest", "GetPublicDelegatedPrefixeRequest", "GetRegionAutoscalerRequest", + "GetRegionBackendBucketRequest", "GetRegionBackendServiceRequest", "GetRegionCommitmentRequest", "GetRegionCompositeHealthCheckRequest", @@ -2418,8 +2528,10 @@ "GetRegionHealthCheckServiceRequest", "GetRegionHealthSourceRequest", 
"GetRegionInstanceGroupManagerRequest", + "GetRegionInstanceGroupManagerResizeRequestRequest", "GetRegionInstanceGroupRequest", "GetRegionInstanceTemplateRequest", + "GetRegionInstantSnapshotGroupRequest", "GetRegionInstantSnapshotRequest", "GetRegionNetworkEndpointGroupRequest", "GetRegionNetworkFirewallPolicyRequest", @@ -2427,6 +2539,8 @@ "GetRegionOperationRequest", "GetRegionRequest", "GetRegionSecurityPolicyRequest", + "GetRegionSnapshotRequest", + "GetRegionSnapshotSettingRequest", "GetRegionSslCertificateRequest", "GetRegionSslPolicyRequest", "GetRegionTargetHttpProxyRequest", @@ -2481,6 +2595,7 @@ "GetXpnResourcesProjectsRequest", "GetZoneOperationRequest", "GetZoneRequest", + "GetZoneVmExtensionPolicyRequest", "GlobalAddressesMoveRequest", "GlobalNetworkEndpointGroupsAttachEndpointsRequest", "GlobalNetworkEndpointGroupsDetachEndpointsRequest", @@ -2511,7 +2626,10 @@ "HealthChecksScopedList", "HealthSource", "HealthSourceAggregatedList", + "HealthSourceHealth", "HealthSourceList", + "HealthSourcesGetHealthResponseSourceInfo", + "HealthSourcesGetHealthResponseSourceInfoBackendInfo", "HealthSourcesScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", @@ -2560,6 +2678,7 @@ "InsertInstanceGroupRequest", "InsertInstanceRequest", "InsertInstanceTemplateRequest", + "InsertInstantSnapshotGroupRequest", "InsertInstantSnapshotRequest", "InsertInterconnectAttachmentGroupRequest", "InsertInterconnectAttachmentRequest", @@ -2579,6 +2698,7 @@ "InsertPublicAdvertisedPrefixeRequest", "InsertPublicDelegatedPrefixeRequest", "InsertRegionAutoscalerRequest", + "InsertRegionBackendBucketRequest", "InsertRegionBackendServiceRequest", "InsertRegionCommitmentRequest", "InsertRegionCompositeHealthCheckRequest", @@ -2588,12 +2708,15 @@ "InsertRegionHealthCheckServiceRequest", "InsertRegionHealthSourceRequest", "InsertRegionInstanceGroupManagerRequest", + "InsertRegionInstanceGroupManagerResizeRequestRequest", "InsertRegionInstanceTemplateRequest", + 
"InsertRegionInstantSnapshotGroupRequest", "InsertRegionInstantSnapshotRequest", "InsertRegionNetworkEndpointGroupRequest", "InsertRegionNetworkFirewallPolicyRequest", "InsertRegionNotificationEndpointRequest", "InsertRegionSecurityPolicyRequest", + "InsertRegionSnapshotRequest", "InsertRegionSslCertificateRequest", "InsertRegionSslPolicyRequest", "InsertRegionTargetHttpProxyRequest", @@ -2623,6 +2746,7 @@ "InsertVpnGatewayRequest", "InsertVpnTunnelRequest", "InsertWireGroupRequest", + "InsertZoneVmExtensionPolicyRequest", "Instance", "InstanceAggregatedList", "InstanceConsumptionData", @@ -2670,6 +2794,7 @@ "InstanceGroupManagerStatusAllInstancesConfig", "InstanceGroupManagerStatusBulkInstanceOperation", "InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck", + "InstanceGroupManagerStatusInstanceStatusSummary", "InstanceGroupManagerStatusStateful", "InstanceGroupManagerStatusStatefulPerInstanceConfigs", "InstanceGroupManagerStatusVersionTarget", @@ -2718,6 +2843,10 @@ "InstanceWithNamedPorts", "InstantSnapshot", "InstantSnapshotAggregatedList", + "InstantSnapshotGroup", + "InstantSnapshotGroupParameters", + "InstantSnapshotGroupResourceStatus", + "InstantSnapshotGroupSourceInfo", "InstantSnapshotList", "InstantSnapshotParams", "InstantSnapshotResourceStatus", @@ -2843,6 +2972,8 @@ "ListInstancesRegionInstanceGroupsRequest", "ListInstancesRequest", "ListInstanceTemplatesRequest", + "ListInstantSnapshotGroups", + "ListInstantSnapshotGroupsRequest", "ListInstantSnapshotsRequest", "ListInterconnectAttachmentGroupsRequest", "ListInterconnectAttachmentsRequest", @@ -2879,6 +3010,7 @@ "ListPublicDelegatedPrefixesRequest", "ListReferrersInstancesRequest", "ListRegionAutoscalersRequest", + "ListRegionBackendBucketsRequest", "ListRegionBackendServicesRequest", "ListRegionCommitmentsRequest", "ListRegionCompositeHealthChecksRequest", @@ -2888,15 +3020,18 @@ "ListRegionHealthCheckServicesRequest", "ListRegionHealthChecksRequest", "ListRegionHealthSourcesRequest", 
+ "ListRegionInstanceGroupManagerResizeRequestsRequest", "ListRegionInstanceGroupManagersRequest", "ListRegionInstanceGroupsRequest", "ListRegionInstanceTemplatesRequest", + "ListRegionInstantSnapshotGroupsRequest", "ListRegionInstantSnapshotsRequest", "ListRegionNetworkEndpointGroupsRequest", "ListRegionNetworkFirewallPoliciesRequest", "ListRegionNotificationEndpointsRequest", "ListRegionOperationsRequest", "ListRegionSecurityPoliciesRequest", + "ListRegionSnapshotsRequest", "ListRegionsRequest", "ListRegionSslCertificatesRequest", "ListRegionSslPoliciesRequest", @@ -2930,7 +3065,9 @@ "ListTargetTcpProxiesRequest", "ListTargetVpnGatewaysRequest", "ListUrlMapsRequest", + "ListUsableBackendBucketsRequest", "ListUsableBackendServicesRequest", + "ListUsableRegionBackendBucketsRequest", "ListUsableRegionBackendServicesRequest", "ListUsableSubnetworksRequest", "ListVpnGatewaysRequest", @@ -2939,6 +3076,7 @@ "ListXpnHostsProjectsRequest", "ListZoneOperationsRequest", "ListZonesRequest", + "ListZoneVmExtensionPoliciesRequest", "LocalDisk", "LocalizedMessage", "LocationPolicy", @@ -3090,6 +3228,7 @@ "PatchPublicAdvertisedPrefixeRequest", "PatchPublicDelegatedPrefixeRequest", "PatchRegionAutoscalerRequest", + "PatchRegionBackendBucketRequest", "PatchRegionBackendServiceRequest", "PatchRegionCompositeHealthCheckRequest", "PatchRegionHealthAggregationPolicyRequest", @@ -3099,6 +3238,7 @@ "PatchRegionInstanceGroupManagerRequest", "PatchRegionNetworkFirewallPolicyRequest", "PatchRegionSecurityPolicyRequest", + "PatchRegionSnapshotSettingRequest", "PatchRegionSslPolicyRequest", "PatchRegionTargetHttpsProxyRequest", "PatchRegionUrlMapRequest", @@ -3172,10 +3312,12 @@ "RegionDisksResizeRequest", "RegionDisksStartAsyncReplicationRequest", "RegionDiskTypeList", + "RegionDiskUpdateKmsKeyRequest", "RegionInstanceGroupList", "RegionInstanceGroupManagerDeleteInstanceConfigReq", "RegionInstanceGroupManagerList", "RegionInstanceGroupManagerPatchInstanceConfigReq", + 
"RegionInstanceGroupManagerResizeRequestsListResponse", "RegionInstanceGroupManagersAbandonInstancesRequest", "RegionInstanceGroupManagersApplyUpdatesRequest", "RegionInstanceGroupManagersCreateInstancesRequest", @@ -3201,6 +3343,7 @@ "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "RegionSetLabelsRequest", "RegionSetPolicyRequest", + "RegionSnapshotUpdateKmsKeyRequest", "RegionTargetHttpsProxiesSetSslCertificatesRequest", "RegionUrlMapsValidateRequest", "RemoveAssociationFirewallPolicyRequest", @@ -3397,6 +3540,7 @@ "SetIamPolicyImageRequest", "SetIamPolicyInstanceRequest", "SetIamPolicyInstanceTemplateRequest", + "SetIamPolicyInstantSnapshotGroupRequest", "SetIamPolicyInstantSnapshotRequest", "SetIamPolicyInterconnectAttachmentGroupRequest", "SetIamPolicyInterconnectGroupRequest", @@ -3406,10 +3550,13 @@ "SetIamPolicyNetworkFirewallPolicyRequest", "SetIamPolicyNodeGroupRequest", "SetIamPolicyNodeTemplateRequest", + "SetIamPolicyRegionBackendBucketRequest", "SetIamPolicyRegionBackendServiceRequest", "SetIamPolicyRegionDiskRequest", + "SetIamPolicyRegionInstantSnapshotGroupRequest", "SetIamPolicyRegionInstantSnapshotRequest", "SetIamPolicyRegionNetworkFirewallPolicyRequest", + "SetIamPolicyRegionSnapshotRequest", "SetIamPolicyReservationBlockRequest", "SetIamPolicyReservationRequest", "SetIamPolicyReservationSubBlockRequest", @@ -3435,6 +3582,7 @@ "SetLabelsRegionDiskRequest", "SetLabelsRegionInstantSnapshotRequest", "SetLabelsRegionSecurityPolicyRequest", + "SetLabelsRegionSnapshotRequest", "SetLabelsSecurityPolicyRequest", "SetLabelsSnapshotRequest", "SetLabelsTargetVpnGatewayRequest", @@ -3485,11 +3633,15 @@ "SimulateMaintenanceEventInstanceRequest", "SimulateMaintenanceEventNodeGroupRequest", "Snapshot", + "SnapshotGroupParameters", "SnapshotList", "SnapshotParams", "SnapshotSettings", + "SnapshotSettingsAccessLocation", + "SnapshotSettingsAccessLocationAccessLocationPreference", "SnapshotSettingsStorageLocationSettings", 
"SnapshotSettingsStorageLocationSettingsStorageLocationPreference", + "SnapshotUpdateKmsKeyRequest", "SourceDiskEncryptionKey", "SourceInstanceParams", "SourceInstanceProperties", @@ -3616,6 +3768,7 @@ "TestIamPermissionsInstanceGroupRequest", "TestIamPermissionsInstanceRequest", "TestIamPermissionsInstanceTemplateRequest", + "TestIamPermissionsInstantSnapshotGroupRequest", "TestIamPermissionsInstantSnapshotRequest", "TestIamPermissionsInterconnectAttachmentGroupRequest", "TestIamPermissionsInterconnectGroupRequest", @@ -3629,6 +3782,7 @@ "TestIamPermissionsNodeTemplateRequest", "TestIamPermissionsPacketMirroringRequest", "TestIamPermissionsRegionAutoscalerRequest", + "TestIamPermissionsRegionBackendBucketRequest", "TestIamPermissionsRegionBackendServiceRequest", "TestIamPermissionsRegionCompositeHealthCheckRequest", "TestIamPermissionsRegionDiskRequest", @@ -3637,9 +3791,11 @@ "TestIamPermissionsRegionHealthCheckServiceRequest", "TestIamPermissionsRegionHealthSourceRequest", "TestIamPermissionsRegionInstanceGroupRequest", + "TestIamPermissionsRegionInstantSnapshotGroupRequest", "TestIamPermissionsRegionInstantSnapshotRequest", "TestIamPermissionsRegionNetworkFirewallPolicyRequest", "TestIamPermissionsRegionNotificationEndpointRequest", + "TestIamPermissionsRegionSnapshotRequest", "TestIamPermissionsReservationBlockRequest", "TestIamPermissionsReservationRequest", "TestIamPermissionsReservationSubBlockRequest", @@ -3669,6 +3825,10 @@ "UpdateFutureReservationRequest", "UpdateHealthCheckRequest", "UpdateInstanceRequest", + "UpdateKmsKeyDiskRequest", + "UpdateKmsKeyRegionDiskRequest", + "UpdateKmsKeyRegionSnapshotRequest", + "UpdateKmsKeySnapshotRequest", "UpdateLicenseRequest", "UpdateNetworkInterfaceInstanceRequest", "UpdatePeeringNetworkRequest", @@ -3688,6 +3848,7 @@ "UpdateShieldedInstanceConfigInstanceRequest", "UpdateStoragePoolRequest", "UpdateUrlMapRequest", + "UpdateZoneVmExtensionPolicyRequest", "UrlMap", "UrlMapList", "UrlMapReference", @@ -3709,6 +3870,11 
@@ "VmEndpointNatMappingsInterfaceNatMappings", "VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", "VmEndpointNatMappingsList", + "VmExtensionPolicy", + "VmExtensionPolicyExtensionPolicy", + "VmExtensionPolicyInstanceSelector", + "VmExtensionPolicyLabelSelector", + "VmExtensionPolicyList", "VpnGateway", "VpnGatewayAggregatedList", "VpnGatewayList", diff --git a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py index eb3daeee1382..cb5f36aede08 100644 --- a/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py +++ b/packages/google-cloud-compute/google/cloud/compute_v1/types/compute.py @@ -67,6 +67,7 @@ "AggregatedListAcceleratorTypesRequest", "AggregatedListAddressesRequest", "AggregatedListAutoscalersRequest", + "AggregatedListBackendBucketsRequest", "AggregatedListBackendServicesRequest", "AggregatedListDiskTypesRequest", "AggregatedListDisksRequest", @@ -153,13 +154,16 @@ "Backend", "BackendBackendOrchestrationInfo", "BackendBucket", + "BackendBucketAggregatedList", "BackendBucketCdnPolicy", "BackendBucketCdnPolicyBypassCacheOnRequestHeader", "BackendBucketCdnPolicyCacheKeyPolicy", "BackendBucketCdnPolicyNegativeCachingPolicy", "BackendBucketList", + "BackendBucketListUsable", "BackendBucketParams", "BackendBucketUsedBy", + "BackendBucketsScopedList", "BackendCustomMetric", "BackendService", "BackendServiceAggregatedList", @@ -211,12 +215,16 @@ "BundledLocalSsds", "CacheInvalidationRule", "CacheKeyPolicy", + "CachePolicy", + "CachePolicyCacheKeyPolicy", + "CachePolicyNegativeCachingPolicy", "CalendarModeAdviceRequest", "CalendarModeAdviceResponse", "CalendarModeAdviceRpcRequest", "CalendarModeRecommendation", "CancelFutureReservationRequest", "CancelInstanceGroupManagerResizeRequestRequest", + "CancelRegionInstanceGroupManagerResizeRequestRequest", "CircuitBreakers", "CloneRulesFirewallPolicyRequest", "CloneRulesNetworkFirewallPolicyRequest", @@ 
-224,11 +232,14 @@ "Commitment", "CommitmentAggregatedList", "CommitmentList", + "CommitmentParams", "CommitmentResourceStatus", "CommitmentsScopedList", "CompositeHealthCheck", "CompositeHealthCheckAggregatedList", + "CompositeHealthCheckHealth", "CompositeHealthCheckList", + "CompositeHealthChecksGetHealthResponseHealthSourceHealth", "CompositeHealthChecksScopedList", "ConfidentialInstanceConfig", "ConnectionDraining", @@ -278,6 +289,7 @@ "DeleteInstanceTemplateRequest", "DeleteInstancesInstanceGroupManagerRequest", "DeleteInstancesRegionInstanceGroupManagerRequest", + "DeleteInstantSnapshotGroupRequest", "DeleteInstantSnapshotRequest", "DeleteInterconnectAttachmentGroupRequest", "DeleteInterconnectAttachmentRequest", @@ -301,6 +313,7 @@ "DeletePublicAdvertisedPrefixeRequest", "DeletePublicDelegatedPrefixeRequest", "DeleteRegionAutoscalerRequest", + "DeleteRegionBackendBucketRequest", "DeleteRegionBackendServiceRequest", "DeleteRegionCompositeHealthCheckRequest", "DeleteRegionDiskRequest", @@ -309,7 +322,9 @@ "DeleteRegionHealthCheckServiceRequest", "DeleteRegionHealthSourceRequest", "DeleteRegionInstanceGroupManagerRequest", + "DeleteRegionInstanceGroupManagerResizeRequestRequest", "DeleteRegionInstanceTemplateRequest", + "DeleteRegionInstantSnapshotGroupRequest", "DeleteRegionInstantSnapshotRequest", "DeleteRegionNetworkEndpointGroupRequest", "DeleteRegionNetworkFirewallPolicyRequest", @@ -317,6 +332,7 @@ "DeleteRegionOperationRequest", "DeleteRegionOperationResponse", "DeleteRegionSecurityPolicyRequest", + "DeleteRegionSnapshotRequest", "DeleteRegionSslCertificateRequest", "DeleteRegionSslPolicyRequest", "DeleteRegionTargetHttpProxyRequest", @@ -351,6 +367,7 @@ "DeleteWireGroupRequest", "DeleteZoneOperationRequest", "DeleteZoneOperationResponse", + "DeleteZoneVmExtensionPolicyRequest", "Denied", "DeprecateImageRequest", "DeprecationStatus", @@ -374,6 +391,7 @@ "DiskTypeAggregatedList", "DiskTypeList", "DiskTypesScopedList", + "DiskUpdateKmsKeyRequest", 
"DisksAddResourcePoliciesRequest", "DisksRemoveResourcePoliciesRequest", "DisksResizeRequest", @@ -422,6 +440,7 @@ "ForwardingRulesScopedList", "FutureReservation", "FutureReservationCommitmentInfo", + "FutureReservationParams", "FutureReservationSpecificSKUProperties", "FutureReservationStatus", "FutureReservationStatusExistingMatchingUsageInfo", @@ -477,6 +496,8 @@ "GetHealthBackendServiceRequest", "GetHealthCheckRequest", "GetHealthRegionBackendServiceRequest", + "GetHealthRegionCompositeHealthCheckRequest", + "GetHealthRegionHealthSourceRequest", "GetHealthTargetPoolRequest", "GetIamPolicyBackendBucketRequest", "GetIamPolicyBackendServiceRequest", @@ -485,6 +506,7 @@ "GetIamPolicyImageRequest", "GetIamPolicyInstanceRequest", "GetIamPolicyInstanceTemplateRequest", + "GetIamPolicyInstantSnapshotGroupRequest", "GetIamPolicyInstantSnapshotRequest", "GetIamPolicyInterconnectAttachmentGroupRequest", "GetIamPolicyInterconnectGroupRequest", @@ -494,10 +516,13 @@ "GetIamPolicyNetworkFirewallPolicyRequest", "GetIamPolicyNodeGroupRequest", "GetIamPolicyNodeTemplateRequest", + "GetIamPolicyRegionBackendBucketRequest", "GetIamPolicyRegionBackendServiceRequest", "GetIamPolicyRegionDiskRequest", + "GetIamPolicyRegionInstantSnapshotGroupRequest", "GetIamPolicyRegionInstantSnapshotRequest", "GetIamPolicyRegionNetworkFirewallPolicyRequest", + "GetIamPolicyRegionSnapshotRequest", "GetIamPolicyReservationBlockRequest", "GetIamPolicyReservationRequest", "GetIamPolicyReservationSubBlockRequest", @@ -514,6 +539,7 @@ "GetInstanceRequest", "GetInstanceSettingRequest", "GetInstanceTemplateRequest", + "GetInstantSnapshotGroupRequest", "GetInstantSnapshotRequest", "GetInterconnectAttachmentGroupRequest", "GetInterconnectAttachmentRequest", @@ -547,6 +573,7 @@ "GetPublicAdvertisedPrefixeRequest", "GetPublicDelegatedPrefixeRequest", "GetRegionAutoscalerRequest", + "GetRegionBackendBucketRequest", "GetRegionBackendServiceRequest", "GetRegionCommitmentRequest", 
"GetRegionCompositeHealthCheckRequest", @@ -557,8 +584,10 @@ "GetRegionHealthCheckServiceRequest", "GetRegionHealthSourceRequest", "GetRegionInstanceGroupManagerRequest", + "GetRegionInstanceGroupManagerResizeRequestRequest", "GetRegionInstanceGroupRequest", "GetRegionInstanceTemplateRequest", + "GetRegionInstantSnapshotGroupRequest", "GetRegionInstantSnapshotRequest", "GetRegionNetworkEndpointGroupRequest", "GetRegionNetworkFirewallPolicyRequest", @@ -566,6 +595,8 @@ "GetRegionOperationRequest", "GetRegionRequest", "GetRegionSecurityPolicyRequest", + "GetRegionSnapshotRequest", + "GetRegionSnapshotSettingRequest", "GetRegionSslCertificateRequest", "GetRegionSslPolicyRequest", "GetRegionTargetHttpProxyRequest", @@ -620,6 +651,7 @@ "GetXpnResourcesProjectsRequest", "GetZoneOperationRequest", "GetZoneRequest", + "GetZoneVmExtensionPolicyRequest", "GlobalAddressesMoveRequest", "GlobalNetworkEndpointGroupsAttachEndpointsRequest", "GlobalNetworkEndpointGroupsDetachEndpointsRequest", @@ -651,7 +683,10 @@ "HealthChecksScopedList", "HealthSource", "HealthSourceAggregatedList", + "HealthSourceHealth", "HealthSourceList", + "HealthSourcesGetHealthResponseSourceInfo", + "HealthSourcesGetHealthResponseSourceInfoBackendInfo", "HealthSourcesScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", @@ -697,6 +732,7 @@ "InsertInstanceGroupRequest", "InsertInstanceRequest", "InsertInstanceTemplateRequest", + "InsertInstantSnapshotGroupRequest", "InsertInstantSnapshotRequest", "InsertInterconnectAttachmentGroupRequest", "InsertInterconnectAttachmentRequest", @@ -716,6 +752,7 @@ "InsertPublicAdvertisedPrefixeRequest", "InsertPublicDelegatedPrefixeRequest", "InsertRegionAutoscalerRequest", + "InsertRegionBackendBucketRequest", "InsertRegionBackendServiceRequest", "InsertRegionCommitmentRequest", "InsertRegionCompositeHealthCheckRequest", @@ -725,12 +762,15 @@ "InsertRegionHealthCheckServiceRequest", "InsertRegionHealthSourceRequest", "InsertRegionInstanceGroupManagerRequest", + 
"InsertRegionInstanceGroupManagerResizeRequestRequest", "InsertRegionInstanceTemplateRequest", + "InsertRegionInstantSnapshotGroupRequest", "InsertRegionInstantSnapshotRequest", "InsertRegionNetworkEndpointGroupRequest", "InsertRegionNetworkFirewallPolicyRequest", "InsertRegionNotificationEndpointRequest", "InsertRegionSecurityPolicyRequest", + "InsertRegionSnapshotRequest", "InsertRegionSslCertificateRequest", "InsertRegionSslPolicyRequest", "InsertRegionTargetHttpProxyRequest", @@ -760,6 +800,7 @@ "InsertVpnGatewayRequest", "InsertVpnTunnelRequest", "InsertWireGroupRequest", + "InsertZoneVmExtensionPolicyRequest", "Instance", "InstanceAggregatedList", "InstanceConsumptionData", @@ -790,6 +831,7 @@ "InstanceGroupManagerStatusAllInstancesConfig", "InstanceGroupManagerStatusBulkInstanceOperation", "InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck", + "InstanceGroupManagerStatusInstanceStatusSummary", "InstanceGroupManagerStatusStateful", "InstanceGroupManagerStatusStatefulPerInstanceConfigs", "InstanceGroupManagerStatusVersionTarget", @@ -855,6 +897,10 @@ "InstancesStartWithEncryptionKeyRequest", "InstantSnapshot", "InstantSnapshotAggregatedList", + "InstantSnapshotGroup", + "InstantSnapshotGroupParameters", + "InstantSnapshotGroupResourceStatus", + "InstantSnapshotGroupSourceInfo", "InstantSnapshotList", "InstantSnapshotParams", "InstantSnapshotResourceStatus", @@ -980,6 +1026,8 @@ "ListInstancesInstanceGroupsRequest", "ListInstancesRegionInstanceGroupsRequest", "ListInstancesRequest", + "ListInstantSnapshotGroups", + "ListInstantSnapshotGroupsRequest", "ListInstantSnapshotsRequest", "ListInterconnectAttachmentGroupsRequest", "ListInterconnectAttachmentsRequest", @@ -1016,6 +1064,7 @@ "ListPublicDelegatedPrefixesRequest", "ListReferrersInstancesRequest", "ListRegionAutoscalersRequest", + "ListRegionBackendBucketsRequest", "ListRegionBackendServicesRequest", "ListRegionCommitmentsRequest", "ListRegionCompositeHealthChecksRequest", @@ -1025,15 
+1074,18 @@ "ListRegionHealthCheckServicesRequest", "ListRegionHealthChecksRequest", "ListRegionHealthSourcesRequest", + "ListRegionInstanceGroupManagerResizeRequestsRequest", "ListRegionInstanceGroupManagersRequest", "ListRegionInstanceGroupsRequest", "ListRegionInstanceTemplatesRequest", + "ListRegionInstantSnapshotGroupsRequest", "ListRegionInstantSnapshotsRequest", "ListRegionNetworkEndpointGroupsRequest", "ListRegionNetworkFirewallPoliciesRequest", "ListRegionNotificationEndpointsRequest", "ListRegionOperationsRequest", "ListRegionSecurityPoliciesRequest", + "ListRegionSnapshotsRequest", "ListRegionSslCertificatesRequest", "ListRegionSslPoliciesRequest", "ListRegionTargetHttpProxiesRequest", @@ -1067,7 +1119,9 @@ "ListTargetTcpProxiesRequest", "ListTargetVpnGatewaysRequest", "ListUrlMapsRequest", + "ListUsableBackendBucketsRequest", "ListUsableBackendServicesRequest", + "ListUsableRegionBackendBucketsRequest", "ListUsableRegionBackendServicesRequest", "ListUsableSubnetworksRequest", "ListVpnGatewaysRequest", @@ -1075,6 +1129,7 @@ "ListWireGroupsRequest", "ListXpnHostsProjectsRequest", "ListZoneOperationsRequest", + "ListZoneVmExtensionPoliciesRequest", "ListZonesRequest", "LocalDisk", "LocalizedMessage", @@ -1227,6 +1282,7 @@ "PatchPublicAdvertisedPrefixeRequest", "PatchPublicDelegatedPrefixeRequest", "PatchRegionAutoscalerRequest", + "PatchRegionBackendBucketRequest", "PatchRegionBackendServiceRequest", "PatchRegionCompositeHealthCheckRequest", "PatchRegionHealthAggregationPolicyRequest", @@ -1236,6 +1292,7 @@ "PatchRegionInstanceGroupManagerRequest", "PatchRegionNetworkFirewallPolicyRequest", "PatchRegionSecurityPolicyRequest", + "PatchRegionSnapshotSettingRequest", "PatchRegionSslPolicyRequest", "PatchRegionTargetHttpsProxyRequest", "PatchRegionUrlMapRequest", @@ -1305,6 +1362,7 @@ "RegionAddressesMoveRequest", "RegionAutoscalerList", "RegionDiskTypeList", + "RegionDiskUpdateKmsKeyRequest", "RegionDisksAddResourcePoliciesRequest", 
"RegionDisksRemoveResourcePoliciesRequest", "RegionDisksResizeRequest", @@ -1313,6 +1371,7 @@ "RegionInstanceGroupManagerDeleteInstanceConfigReq", "RegionInstanceGroupManagerList", "RegionInstanceGroupManagerPatchInstanceConfigReq", + "RegionInstanceGroupManagerResizeRequestsListResponse", "RegionInstanceGroupManagerUpdateInstanceConfigReq", "RegionInstanceGroupManagersAbandonInstancesRequest", "RegionInstanceGroupManagersApplyUpdatesRequest", @@ -1338,6 +1397,7 @@ "RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponseEffectiveFirewallPolicy", "RegionSetLabelsRequest", "RegionSetPolicyRequest", + "RegionSnapshotUpdateKmsKeyRequest", "RegionTargetHttpsProxiesSetSslCertificatesRequest", "RegionUrlMapsValidateRequest", "RemoveAssociationFirewallPolicyRequest", @@ -1535,6 +1595,7 @@ "SetIamPolicyImageRequest", "SetIamPolicyInstanceRequest", "SetIamPolicyInstanceTemplateRequest", + "SetIamPolicyInstantSnapshotGroupRequest", "SetIamPolicyInstantSnapshotRequest", "SetIamPolicyInterconnectAttachmentGroupRequest", "SetIamPolicyInterconnectGroupRequest", @@ -1544,10 +1605,13 @@ "SetIamPolicyNetworkFirewallPolicyRequest", "SetIamPolicyNodeGroupRequest", "SetIamPolicyNodeTemplateRequest", + "SetIamPolicyRegionBackendBucketRequest", "SetIamPolicyRegionBackendServiceRequest", "SetIamPolicyRegionDiskRequest", + "SetIamPolicyRegionInstantSnapshotGroupRequest", "SetIamPolicyRegionInstantSnapshotRequest", "SetIamPolicyRegionNetworkFirewallPolicyRequest", + "SetIamPolicyRegionSnapshotRequest", "SetIamPolicyReservationBlockRequest", "SetIamPolicyReservationRequest", "SetIamPolicyReservationSubBlockRequest", @@ -1573,6 +1637,7 @@ "SetLabelsRegionDiskRequest", "SetLabelsRegionInstantSnapshotRequest", "SetLabelsRegionSecurityPolicyRequest", + "SetLabelsRegionSnapshotRequest", "SetLabelsSecurityPolicyRequest", "SetLabelsSnapshotRequest", "SetLabelsTargetVpnGatewayRequest", @@ -1623,11 +1688,15 @@ "SimulateMaintenanceEventInstanceRequest", "SimulateMaintenanceEventNodeGroupRequest", 
"Snapshot", + "SnapshotGroupParameters", "SnapshotList", "SnapshotParams", "SnapshotSettings", + "SnapshotSettingsAccessLocation", + "SnapshotSettingsAccessLocationAccessLocationPreference", "SnapshotSettingsStorageLocationSettings", "SnapshotSettingsStorageLocationSettingsStorageLocationPreference", + "SnapshotUpdateKmsKeyRequest", "SourceDiskEncryptionKey", "SourceInstanceParams", "SourceInstanceProperties", @@ -1753,6 +1822,7 @@ "TestIamPermissionsInstanceGroupRequest", "TestIamPermissionsInstanceRequest", "TestIamPermissionsInstanceTemplateRequest", + "TestIamPermissionsInstantSnapshotGroupRequest", "TestIamPermissionsInstantSnapshotRequest", "TestIamPermissionsInterconnectAttachmentGroupRequest", "TestIamPermissionsInterconnectGroupRequest", @@ -1766,6 +1836,7 @@ "TestIamPermissionsNodeTemplateRequest", "TestIamPermissionsPacketMirroringRequest", "TestIamPermissionsRegionAutoscalerRequest", + "TestIamPermissionsRegionBackendBucketRequest", "TestIamPermissionsRegionBackendServiceRequest", "TestIamPermissionsRegionCompositeHealthCheckRequest", "TestIamPermissionsRegionDiskRequest", @@ -1774,9 +1845,11 @@ "TestIamPermissionsRegionHealthCheckServiceRequest", "TestIamPermissionsRegionHealthSourceRequest", "TestIamPermissionsRegionInstanceGroupRequest", + "TestIamPermissionsRegionInstantSnapshotGroupRequest", "TestIamPermissionsRegionInstantSnapshotRequest", "TestIamPermissionsRegionNetworkFirewallPolicyRequest", "TestIamPermissionsRegionNotificationEndpointRequest", + "TestIamPermissionsRegionSnapshotRequest", "TestIamPermissionsReservationBlockRequest", "TestIamPermissionsReservationRequest", "TestIamPermissionsReservationSubBlockRequest", @@ -1806,6 +1879,10 @@ "UpdateFutureReservationRequest", "UpdateHealthCheckRequest", "UpdateInstanceRequest", + "UpdateKmsKeyDiskRequest", + "UpdateKmsKeyRegionDiskRequest", + "UpdateKmsKeyRegionSnapshotRequest", + "UpdateKmsKeySnapshotRequest", "UpdateLicenseRequest", "UpdateNetworkInterfaceInstanceRequest", 
"UpdatePeeringNetworkRequest", @@ -1825,6 +1902,7 @@ "UpdateShieldedInstanceConfigInstanceRequest", "UpdateStoragePoolRequest", "UpdateUrlMapRequest", + "UpdateZoneVmExtensionPolicyRequest", "UrlMap", "UrlMapList", "UrlMapReference", @@ -1846,6 +1924,11 @@ "VmEndpointNatMappingsInterfaceNatMappings", "VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", "VmEndpointNatMappingsList", + "VmExtensionPolicy", + "VmExtensionPolicyExtensionPolicy", + "VmExtensionPolicyInstanceSelector", + "VmExtensionPolicyLabelSelector", + "VmExtensionPolicyList", "VpnGateway", "VpnGatewayAggregatedList", "VpnGatewayList", @@ -5639,8 +5722,8 @@ class AggregatedListAutoscalersRequest(proto.Message): ) -class AggregatedListBackendServicesRequest(proto.Message): - r"""A request message for BackendServices.AggregatedList. See the +class AggregatedListBackendBucketsRequest(proto.Message): + r"""A request message for BackendBuckets.AggregatedList. See the method description for details. @@ -5825,8 +5908,8 @@ class AggregatedListBackendServicesRequest(proto.Message): ) -class AggregatedListDiskTypesRequest(proto.Message): - r"""A request message for DiskTypes.AggregatedList. See the +class AggregatedListBackendServicesRequest(proto.Message): + r"""A request message for BackendServices.AggregatedList. See the method description for details. @@ -5950,7 +6033,7 @@ class AggregatedListDiskTypesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -6011,9 +6094,9 @@ class AggregatedListDiskTypesRequest(proto.Message): ) -class AggregatedListDisksRequest(proto.Message): - r"""A request message for Disks.AggregatedList. See the method - description for details. 
+class AggregatedListDiskTypesRequest(proto.Message): + r"""A request message for DiskTypes.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6197,9 +6280,9 @@ class AggregatedListDisksRequest(proto.Message): ) -class AggregatedListForwardingRulesRequest(proto.Message): - r"""A request message for ForwardingRules.AggregatedList. See the - method description for details. +class AggregatedListDisksRequest(proto.Message): + r"""A request message for Disks.AggregatedList. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6383,9 +6466,9 @@ class AggregatedListForwardingRulesRequest(proto.Message): ) -class AggregatedListFutureReservationsRequest(proto.Message): - r"""A request message for FutureReservations.AggregatedList. See - the method description for details. +class AggregatedListForwardingRulesRequest(proto.Message): + r"""A request message for ForwardingRules.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -6569,8 +6652,8 @@ class AggregatedListFutureReservationsRequest(proto.Message): ) -class AggregatedListGlobalOperationsRequest(proto.Message): - r"""A request message for GlobalOperations.AggregatedList. See +class AggregatedListFutureReservationsRequest(proto.Message): + r"""A request message for FutureReservations.AggregatedList. See the method description for details. @@ -6755,195 +6838,9 @@ class AggregatedListGlobalOperationsRequest(proto.Message): ) -class AggregatedListHealthChecksRequest(proto.Message): - r"""A request message for HealthChecks.AggregatedList. See the - method description for details. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. These two types of filter expressions cannot be - mixed in one request. - - If you want to use AIP-160, your expression must specify the - field name, an operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. The operator must be either ``=``, ``!=``, ``>``, - ``<``, ``<=``, ``>=`` or ``:``. - - For example, if you are filtering Compute Engine instances, - you can exclude instances named ``example-instance`` by - specifying ``name != example-instance``. - - The ``:*`` comparison can be used to test whether a key has - been defined. For example, to find all objects with - ``owner`` label use: - - :: - - labels.owner:* - - You can also filter nested fields. For example, you could - specify ``scheduling.automaticRestart = false`` to include - instances only if they are not scheduled for automatic - restarts. You can use filtering on nested fields to filter - based onresource labels. - - To filter on multiple expressions, provide each separate - expression within parentheses. For example: - - :: - - (scheduling.automaticRestart = true) - (cpuPlatform = "Intel Skylake") - - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. 
For example: - - :: - - (cpuPlatform = "Intel Skylake") OR - (cpuPlatform = "Intel Broadwell") AND - (scheduling.automaticRestart = true) - - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` - - The literal value is interpreted as a regular expression - using GoogleRE2 library syntax. The literal value must match - the entire field. - - For example, to filter for instances that do not end with - name "instance", you would use ``name ne .*instance``. - - You cannot combine constraints on multiple fields using - regular expressions. - - This field is a member of `oneof`_ ``_filter``. - include_all_scopes (bool): - Indicates whether every visible scope for - each scope type (zone, region, global) should be - included in the response. For new resource types - added after this field, the flag has no effect - as new resource types will always include every - visible scope for each scope type in response. - For resource types which predate this field, if - this flag is omitted or false, only scopes of - the scope types where the resource type is - expected to be found will be included. - - This field is a member of `oneof`_ ``_include_all_scopes``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. 
By default, results - are returned in alphanumerical order based on the resource - name. - - You can also sort results in descending order based on the - creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. - - Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Name of the project scoping this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - For example, when partial success behavior is - enabled, aggregatedList for a single zone scope - either returns all resources in the zone or no - resources, with an error code. - - This field is a member of `oneof`_ ``_return_partial_success``. - service_project_number (int): - The Shared VPC service project id or service - project number for which aggregated list request - is invoked for subnetworks list-usable api. - - This field is a member of `oneof`_ ``_service_project_number``. 
- """ - - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - include_all_scopes: bool = proto.Field( - proto.BOOL, - number=391327988, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, - ) - service_project_number: int = proto.Field( - proto.INT64, - number=316757497, - optional=True, - ) - - -class AggregatedListInstanceGroupManagersRequest(proto.Message): - r"""A request message for InstanceGroupManagers.AggregatedList. - See the method description for details. +class AggregatedListGlobalOperationsRequest(proto.Message): + r"""A request message for GlobalOperations.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -7127,8 +7024,8 @@ class AggregatedListInstanceGroupManagersRequest(proto.Message): ) -class AggregatedListInstanceGroupsRequest(proto.Message): - r"""A request message for InstanceGroups.AggregatedList. See the +class AggregatedListHealthChecksRequest(proto.Message): + r"""A request message for HealthChecks.AggregatedList. See the method description for details. @@ -7252,7 +7149,7 @@ class AggregatedListInstanceGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -7313,9 +7210,9 @@ class AggregatedListInstanceGroupsRequest(proto.Message): ) -class AggregatedListInstanceTemplatesRequest(proto.Message): - r"""A request message for InstanceTemplates.AggregatedList. See - the method description for details. +class AggregatedListInstanceGroupManagersRequest(proto.Message): + r"""A request message for InstanceGroupManagers.AggregatedList. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -7438,7 +7335,7 @@ class AggregatedListInstanceTemplatesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -7499,8 +7396,8 @@ class AggregatedListInstanceTemplatesRequest(proto.Message): ) -class AggregatedListInstancesRequest(proto.Message): - r"""A request message for Instances.AggregatedList. See the +class AggregatedListInstanceGroupsRequest(proto.Message): + r"""A request message for InstanceGroups.AggregatedList. See the method description for details. @@ -7685,8 +7582,8 @@ class AggregatedListInstancesRequest(proto.Message): ) -class AggregatedListInstantSnapshotsRequest(proto.Message): - r"""A request message for InstantSnapshots.AggregatedList. See +class AggregatedListInstanceTemplatesRequest(proto.Message): + r"""A request message for InstanceTemplates.AggregatedList. See the method description for details. @@ -7810,7 +7707,7 @@ class AggregatedListInstantSnapshotsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -7871,9 +7768,9 @@ class AggregatedListInstantSnapshotsRequest(proto.Message): ) -class AggregatedListInterconnectAttachmentsRequest(proto.Message): - r"""A request message for InterconnectAttachments.AggregatedList. - See the method description for details. +class AggregatedListInstancesRequest(proto.Message): + r"""A request message for Instances.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8057,9 +7954,9 @@ class AggregatedListInterconnectAttachmentsRequest(proto.Message): ) -class AggregatedListMachineTypesRequest(proto.Message): - r"""A request message for MachineTypes.AggregatedList. See the - method description for details. +class AggregatedListInstantSnapshotsRequest(proto.Message): + r"""A request message for InstantSnapshots.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8243,9 +8140,9 @@ class AggregatedListMachineTypesRequest(proto.Message): ) -class AggregatedListNetworkAttachmentsRequest(proto.Message): - r"""A request message for NetworkAttachments.AggregatedList. See - the method description for details. +class AggregatedListInterconnectAttachmentsRequest(proto.Message): + r"""A request message for InterconnectAttachments.AggregatedList. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8429,10 +8326,9 @@ class AggregatedListNetworkAttachmentsRequest(proto.Message): ) -class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): - r"""A request message for - NetworkEdgeSecurityServices.AggregatedList. See the method - description for details. +class AggregatedListMachineTypesRequest(proto.Message): + r"""A request message for MachineTypes.AggregatedList. 
See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8555,7 +8451,7 @@ class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -8616,9 +8512,9 @@ class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): ) -class AggregatedListNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for NetworkEndpointGroups.AggregatedList. - See the method description for details. +class AggregatedListNetworkAttachmentsRequest(proto.Message): + r"""A request message for NetworkAttachments.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8802,9 +8698,10 @@ class AggregatedListNetworkEndpointGroupsRequest(proto.Message): ) -class AggregatedListNetworkFirewallPoliciesRequest(proto.Message): - r"""A request message for NetworkFirewallPolicies.AggregatedList. - See the method description for details. +class AggregatedListNetworkEdgeSecurityServicesRequest(proto.Message): + r"""A request message for + NetworkEdgeSecurityServices.AggregatedList. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -8927,7 +8824,7 @@ class AggregatedListNetworkFirewallPoliciesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -8988,9 +8885,9 @@ class AggregatedListNetworkFirewallPoliciesRequest(proto.Message): ) -class AggregatedListNodeGroupsRequest(proto.Message): - r"""A request message for NodeGroups.AggregatedList. See the - method description for details. +class AggregatedListNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for NetworkEndpointGroups.AggregatedList. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -9174,9 +9071,9 @@ class AggregatedListNodeGroupsRequest(proto.Message): ) -class AggregatedListNodeTemplatesRequest(proto.Message): - r"""A request message for NodeTemplates.AggregatedList. See the - method description for details. +class AggregatedListNetworkFirewallPoliciesRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.AggregatedList. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -9360,8 +9257,8 @@ class AggregatedListNodeTemplatesRequest(proto.Message): ) -class AggregatedListNodeTypesRequest(proto.Message): - r"""A request message for NodeTypes.AggregatedList. See the +class AggregatedListNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.AggregatedList. See the method description for details. @@ -9546,9 +9443,9 @@ class AggregatedListNodeTypesRequest(proto.Message): ) -class AggregatedListPacketMirroringsRequest(proto.Message): - r"""A request message for PacketMirrorings.AggregatedList. See - the method description for details. +class AggregatedListNodeTemplatesRequest(proto.Message): + r"""A request message for NodeTemplates.AggregatedList. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -9732,9 +9629,9 @@ class AggregatedListPacketMirroringsRequest(proto.Message): ) -class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): - r"""A request message for PublicDelegatedPrefixes.AggregatedList. - See the method description for details. +class AggregatedListNodeTypesRequest(proto.Message): + r"""A request message for NodeTypes.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -9857,7 +9754,7 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -9918,8 +9815,8 @@ class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): ) -class AggregatedListRegionCommitmentsRequest(proto.Message): - r"""A request message for RegionCommitments.AggregatedList. See +class AggregatedListPacketMirroringsRequest(proto.Message): + r"""A request message for PacketMirrorings.AggregatedList. See the method description for details. @@ -10104,10 +10001,9 @@ class AggregatedListRegionCommitmentsRequest(proto.Message): ) -class AggregatedListRegionCompositeHealthChecksRequest(proto.Message): - r"""A request message for - RegionCompositeHealthChecks.AggregatedList. See the method - description for details. +class AggregatedListPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.AggregatedList. + See the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -10291,9 +10187,195 @@ class AggregatedListRegionCompositeHealthChecksRequest(proto.Message): ) -class AggregatedListRegionHealthAggregationPoliciesRequest(proto.Message): +class AggregatedListRegionCommitmentsRequest(proto.Message): + r"""A request message for RegionCommitments.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. 
For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + The Shared VPC service project id or service + project number for which aggregated list request + is invoked for subnetworks list-usable api. + + This field is a member of `oneof`_ ``_service_project_number``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) + + +class AggregatedListRegionCompositeHealthChecksRequest(proto.Message): r"""A request message for - RegionHealthAggregationPolicies.AggregatedList. See the method + RegionCompositeHealthChecks.AggregatedList. See the method description for details. @@ -10478,9 +10560,9 @@ class AggregatedListRegionHealthAggregationPoliciesRequest(proto.Message): ) -class AggregatedListRegionHealthCheckServicesRequest(proto.Message): +class AggregatedListRegionHealthAggregationPoliciesRequest(proto.Message): r"""A request message for - RegionHealthCheckServices.AggregatedList. See the method + RegionHealthAggregationPolicies.AggregatedList. See the method description for details. @@ -10665,9 +10747,10 @@ class AggregatedListRegionHealthCheckServicesRequest(proto.Message): ) -class AggregatedListRegionHealthSourcesRequest(proto.Message): - r"""A request message for RegionHealthSources.AggregatedList. See - the method description for details. +class AggregatedListRegionHealthCheckServicesRequest(proto.Message): + r"""A request message for + RegionHealthCheckServices.AggregatedList. See the method + description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -10851,10 +10934,9 @@ class AggregatedListRegionHealthSourcesRequest(proto.Message): ) -class AggregatedListRegionNotificationEndpointsRequest(proto.Message): - r"""A request message for - RegionNotificationEndpoints.AggregatedList. See the method - description for details. +class AggregatedListRegionHealthSourcesRequest(proto.Message): + r"""A request message for RegionHealthSources.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -11038,9 +11120,10 @@ class AggregatedListRegionNotificationEndpointsRequest(proto.Message): ) -class AggregatedListReservationsRequest(proto.Message): - r"""A request message for Reservations.AggregatedList. See the - method description for details. +class AggregatedListRegionNotificationEndpointsRequest(proto.Message): + r"""A request message for + RegionNotificationEndpoints.AggregatedList. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -11163,7 +11246,7 @@ class AggregatedListReservationsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -11224,9 +11307,9 @@ class AggregatedListReservationsRequest(proto.Message): ) -class AggregatedListResourcePoliciesRequest(proto.Message): - r"""A request message for ResourcePolicies.AggregatedList. See - the method description for details. +class AggregatedListReservationsRequest(proto.Message): + r"""A request message for Reservations.AggregatedList. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -11410,9 +11493,9 @@ class AggregatedListResourcePoliciesRequest(proto.Message): ) -class AggregatedListRoutersRequest(proto.Message): - r"""A request message for Routers.AggregatedList. See the method - description for details. +class AggregatedListResourcePoliciesRequest(proto.Message): + r"""A request message for ResourcePolicies.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -11596,9 +11679,9 @@ class AggregatedListRoutersRequest(proto.Message): ) -class AggregatedListSecurityPoliciesRequest(proto.Message): - r"""A request message for SecurityPolicies.AggregatedList. See - the method description for details. +class AggregatedListRoutersRequest(proto.Message): + r"""A request message for Routers.AggregatedList. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -11721,7 +11804,7 @@ class AggregatedListSecurityPoliciesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -11782,8 +11865,8 @@ class AggregatedListSecurityPoliciesRequest(proto.Message): ) -class AggregatedListServiceAttachmentsRequest(proto.Message): - r"""A request message for ServiceAttachments.AggregatedList. See +class AggregatedListSecurityPoliciesRequest(proto.Message): + r"""A request message for SecurityPolicies.AggregatedList. See the method description for details. 
@@ -11968,9 +12051,9 @@ class AggregatedListServiceAttachmentsRequest(proto.Message): ) -class AggregatedListSslCertificatesRequest(proto.Message): - r"""A request message for SslCertificates.AggregatedList. See the - method description for details. +class AggregatedListServiceAttachmentsRequest(proto.Message): + r"""A request message for ServiceAttachments.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -12154,8 +12237,8 @@ class AggregatedListSslCertificatesRequest(proto.Message): ) -class AggregatedListSslPoliciesRequest(proto.Message): - r"""A request message for SslPolicies.AggregatedList. See the +class AggregatedListSslCertificatesRequest(proto.Message): + r"""A request message for SslCertificates.AggregatedList. See the method description for details. @@ -12340,194 +12423,8 @@ class AggregatedListSslPoliciesRequest(proto.Message): ) -class AggregatedListStoragePoolTypesRequest(proto.Message): - r"""A request message for StoragePoolTypes.AggregatedList. See - the method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. These two types of filter expressions cannot be - mixed in one request. - - If you want to use AIP-160, your expression must specify the - field name, an operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. The operator must be either ``=``, ``!=``, ``>``, - ``<``, ``<=``, ``>=`` or ``:``. 
- - For example, if you are filtering Compute Engine instances, - you can exclude instances named ``example-instance`` by - specifying ``name != example-instance``. - - The ``:*`` comparison can be used to test whether a key has - been defined. For example, to find all objects with - ``owner`` label use: - - :: - - labels.owner:* - - You can also filter nested fields. For example, you could - specify ``scheduling.automaticRestart = false`` to include - instances only if they are not scheduled for automatic - restarts. You can use filtering on nested fields to filter - based onresource labels. - - To filter on multiple expressions, provide each separate - expression within parentheses. For example: - - :: - - (scheduling.automaticRestart = true) - (cpuPlatform = "Intel Skylake") - - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - - :: - - (cpuPlatform = "Intel Skylake") OR - (cpuPlatform = "Intel Broadwell") AND - (scheduling.automaticRestart = true) - - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` - - The literal value is interpreted as a regular expression - using GoogleRE2 library syntax. The literal value must match - the entire field. - - For example, to filter for instances that do not end with - name "instance", you would use ``name ne .*instance``. - - You cannot combine constraints on multiple fields using - regular expressions. - - This field is a member of `oneof`_ ``_filter``. 
- include_all_scopes (bool): - Indicates whether every visible scope for - each scope type (zone, region, global) should be - included in the response. For new resource types - added after this field, the flag has no effect - as new resource types will always include every - visible scope for each scope type in response. - For resource types which predate this field, if - this flag is omitted or false, only scopes of - the scope types where the resource type is - expected to be found will be included. - - This field is a member of `oneof`_ ``_include_all_scopes``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. - - You can also sort results in descending order based on the - creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. - - Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. 
- return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - For example, when partial success behavior is - enabled, aggregatedList for a single zone scope - either returns all resources in the zone or no - resources, with an error code. - - This field is a member of `oneof`_ ``_return_partial_success``. - service_project_number (int): - The Shared VPC service project id or service - project number for which aggregated list request - is invoked for subnetworks list-usable api. - - This field is a member of `oneof`_ ``_service_project_number``. - """ - - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - include_all_scopes: bool = proto.Field( - proto.BOOL, - number=391327988, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, - ) - service_project_number: int = proto.Field( - proto.INT64, - number=316757497, - optional=True, - ) - - -class AggregatedListStoragePoolsRequest(proto.Message): - r"""A request message for StoragePools.AggregatedList. See the +class AggregatedListSslPoliciesRequest(proto.Message): + r"""A request message for SslPolicies.AggregatedList. See the method description for details. @@ -12651,7 +12548,7 @@ class AggregatedListStoragePoolsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Project ID for this request. + Name of the project scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -12712,9 +12609,9 @@ class AggregatedListStoragePoolsRequest(proto.Message): ) -class AggregatedListSubnetworksRequest(proto.Message): - r"""A request message for Subnetworks.AggregatedList. See the - method description for details. +class AggregatedListStoragePoolTypesRequest(proto.Message): + r"""A request message for StoragePoolTypes.AggregatedList. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -12855,42 +12752,193 @@ class AggregatedListSubnetworksRequest(proto.Message): is invoked for subnetworks list-usable api. This field is a member of `oneof`_ ``_service_project_number``. - views (str): - Defines the extra views returned back in the subnetwork - resource. Supported values: + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) + + +class AggregatedListStoragePoolsRequest(proto.Message): + r"""A request message for StoragePools.AggregatedList. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: :: - - WITH_UTILIZATION: Utilization data is included in the - response. + labels.owner:* - Check the Views enum for the list of possible values. + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. - This field is a member of `oneof`_ ``_views``. - """ + To filter on multiple expressions, provide each separate + expression within parentheses. For example: - class Views(proto.Enum): - r"""Defines the extra views returned back in the subnetwork resource. - Supported values: + :: - :: + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") - - WITH_UTILIZATION: Utilization data is included in the - response. + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: - Values: - UNDEFINED_VIEWS (0): - A value indicating that the enum field is not - set. - DEFAULT (115302945): - No description available. 
- WITH_UTILIZATION (504090633): - Utilization data is included in the response. - """ + :: - UNDEFINED_VIEWS = 0 - DEFAULT = 115302945 - WITH_UTILIZATION = 504090633 + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. 
(Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + The Shared VPC service project id or service + project number for which aggregated list request + is invoked for subnetworks list-usable api. + + This field is a member of `oneof`_ ``_service_project_number``. + """ filter: str = proto.Field( proto.STRING, @@ -12931,16 +12979,11 @@ class Views(proto.Enum): number=316757497, optional=True, ) - views: str = proto.Field( - proto.STRING, - number=112204398, - optional=True, - ) -class AggregatedListTargetHttpProxiesRequest(proto.Message): - r"""A request message for TargetHttpProxies.AggregatedList. 
See - the method description for details. +class AggregatedListSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -13063,7 +13106,7 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): - Name of the project scoping this request. + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -13081,8 +13124,43 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): is invoked for subnetworks list-usable api. This field is a member of `oneof`_ ``_service_project_number``. + views (str): + Defines the extra views returned back in the subnetwork + resource. Supported values: + + :: + + - WITH_UTILIZATION: Utilization data is included in the + response. + + Check the Views enum for the list of possible values. + + This field is a member of `oneof`_ ``_views``. """ + class Views(proto.Enum): + r"""Defines the extra views returned back in the subnetwork resource. + Supported values: + + :: + + - WITH_UTILIZATION: Utilization data is included in the + response. + + Values: + UNDEFINED_VIEWS (0): + A value indicating that the enum field is not + set. + DEFAULT (115302945): + No description available. + WITH_UTILIZATION (504090633): + Utilization data is included in the response. 
+ """ + + UNDEFINED_VIEWS = 0 + DEFAULT = 115302945 + WITH_UTILIZATION = 504090633 + filter: str = proto.Field( proto.STRING, number=336120696, @@ -13122,10 +13200,15 @@ class AggregatedListTargetHttpProxiesRequest(proto.Message): number=316757497, optional=True, ) + views: str = proto.Field( + proto.STRING, + number=112204398, + optional=True, + ) -class AggregatedListTargetHttpsProxiesRequest(proto.Message): - r"""A request message for TargetHttpsProxies.AggregatedList. See +class AggregatedListTargetHttpProxiesRequest(proto.Message): + r"""A request message for TargetHttpProxies.AggregatedList. See the method description for details. @@ -13310,9 +13393,195 @@ class AggregatedListTargetHttpsProxiesRequest(proto.Message): ) -class AggregatedListTargetInstancesRequest(proto.Message): - r"""A request message for TargetInstances.AggregatedList. See the - method description for details. +class AggregatedListTargetHttpsProxiesRequest(proto.Message): + r"""A request message for TargetHttpsProxies.AggregatedList. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. 
+ + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + include_all_scopes (bool): + Indicates whether every visible scope for + each scope type (zone, region, global) should be + included in the response. 
For new resource types + added after this field, the flag has no effect + as new resource types will always include every + visible scope for each scope type in response. + For resource types which predate this field, if + this flag is omitted or false, only scopes of + the scope types where the resource type is + expected to be found will be included. + + This field is a member of `oneof`_ ``_include_all_scopes``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Name of the project scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. 
+ + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + service_project_number (int): + The Shared VPC service project id or service + project number for which aggregated list request + is invoked for subnetworks list-usable api. + + This field is a member of `oneof`_ ``_service_project_number``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + include_all_scopes: bool = proto.Field( + proto.BOOL, + number=391327988, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + service_project_number: int = proto.Field( + proto.INT64, + number=316757497, + optional=True, + ) + + +class AggregatedListTargetInstancesRequest(proto.Message): + r"""A request message for TargetInstances.AggregatedList. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -17945,6 +18214,25 @@ class Backend(proto.Message): isRATE. This field is a member of `oneof`_ ``_max_connections_per_instance``. + max_in_flight_requests (int): + Defines a maximum number of in-flight + requests for the whole NEG or instance group. + Not available if backend's balancingMode isRATE + or CONNECTION. + + This field is a member of `oneof`_ ``_max_in_flight_requests``. 
+ max_in_flight_requests_per_endpoint (int): + Defines a maximum number of in-flight + requests for a single endpoint. Not available if + backend's balancingMode is RATE or CONNECTION. + + This field is a member of `oneof`_ ``_max_in_flight_requests_per_endpoint``. + max_in_flight_requests_per_instance (int): + Defines a maximum number of in-flight + requests for a single VM. Not available if + backend's balancingMode is RATE or CONNECTION. + + This field is a member of `oneof`_ ``_max_in_flight_requests_per_instance``. max_rate (int): Defines a maximum number of HTTP requests per second (RPS). For usage guidelines, seeRate @@ -18005,6 +18293,11 @@ class Backend(proto.Message): possible values. This field is a member of `oneof`_ ``_preference``. + traffic_duration (str): + Check the TrafficDuration enum for the list + of possible values. + + This field is a member of `oneof`_ ``_traffic_duration``. """ class BalancingMode(proto.Enum): @@ -18032,6 +18325,9 @@ class BalancingMode(proto.Enum): connections. CUSTOM_METRICS (331575765): Based on custom defined and reported metrics. + IN_FLIGHT (190040266): + Balance based on the number of in-flight + requests. RATE (2508000): Balance based on requests per second (RPS). UTILIZATION (157008386): @@ -18041,6 +18337,7 @@ class BalancingMode(proto.Enum): UNDEFINED_BALANCING_MODE = 0 CONNECTION = 246311646 CUSTOM_METRICS = 331575765 + IN_FLIGHT = 190040266 RATE = 2508000 UTILIZATION = 157008386 @@ -18074,6 +18371,28 @@ class Preference(proto.Enum): PREFERENCE_UNSPECIFIED = 496219571 PREFERRED = 418847841 + class TrafficDuration(proto.Enum): + r""" + + Values: + UNDEFINED_TRAFFIC_DURATION (0): + A value indicating that the enum field is not + set. + LONG (2342524): + Most of the requests are expected to take + more than multiple seconds to finish. + SHORT (78875740): + Most requests are expected to finish with a + sub-second latency. + TRAFFIC_DURATION_UNSPECIFIED (265201166): + Traffic duration is unspecified. 
+ """ + + UNDEFINED_TRAFFIC_DURATION = 0 + LONG = 2342524 + SHORT = 78875740 + TRAFFIC_DURATION_UNSPECIFIED = 265201166 + balancing_mode: str = proto.Field( proto.STRING, number=430286217, @@ -18119,6 +18438,21 @@ class Preference(proto.Enum): number=104671900, optional=True, ) + max_in_flight_requests: int = proto.Field( + proto.INT32, + number=273269332, + optional=True, + ) + max_in_flight_requests_per_endpoint: int = proto.Field( + proto.INT32, + number=307928706, + optional=True, + ) + max_in_flight_requests_per_instance: int = proto.Field( + proto.INT32, + number=195696002, + optional=True, + ) max_rate: int = proto.Field( proto.INT32, number=408035035, @@ -18150,6 +18484,11 @@ class Preference(proto.Enum): number=150781147, optional=True, ) + traffic_duration: str = proto.Field( + proto.STRING, + number=11618710, + optional=True, + ) class BackendBackendOrchestrationInfo(proto.Message): @@ -18260,6 +18599,14 @@ class BackendBucket(proto.Message): request, but not persisted as part of resource payload. This field is a member of `oneof`_ ``_params``. + region (str): + Output only. [Output Only] URL of the region where the + regional backend bucket resides. This field is not + applicable to global backend buckets. You must specify this + field as part of the HTTP request URL. It is not settable as + a field in the request body. + + This field is a member of `oneof`_ ``_region``. self_link (str): [Output Only] Server-defined URL for the resource. @@ -18303,12 +18650,16 @@ class LoadBalancingScheme(proto.Enum): UNDEFINED_LOAD_BALANCING_SCHEME (0): A value indicating that the enum field is not set. + EXTERNAL_MANAGED (512006923): + Signifies that this will be used for regional + external Application Load Balancers. INTERNAL_MANAGED (37350397): Signifies that this will be used for internal Application Load Balancers. 
""" UNDEFINED_LOAD_BALANCING_SCHEME = 0 + EXTERNAL_MANAGED = 512006923 INTERNAL_MANAGED = 37350397 bucket_name: str = proto.Field( @@ -18377,6 +18728,11 @@ class LoadBalancingScheme(proto.Enum): optional=True, message="BackendBucketParams", ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) self_link: str = proto.Field( proto.STRING, number=456214797, @@ -18389,6 +18745,81 @@ class LoadBalancingScheme(proto.Enum): ) +class BackendBucketAggregatedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableMapping[str, google.cloud.compute_v1.types.BackendBucketsScopedList]): + A list of BackendBucketsScopedList resources. + kind (str): + Output only. Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger thanmaxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + Output only. [Output Only] Server-defined URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableMapping[str, "BackendBucketsScopedList"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=100526016, + message="BackendBucketsScopedList", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class BackendBucketCdnPolicy(proto.Message): r"""Message containing Cloud CDN configuration for a backend bucket. @@ -18823,6 +19254,82 @@ def raw_page(self): ) +class BackendBucketListUsable(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.BackendBucket]): + A list of BackendBucket resources. + kind (str): + Output only. [Output Only] Type of resource. + Alwayscompute#usableBackendBucketList for lists of usable + backend buckets. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger thanmaxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + Output only. [Output Only] Server-defined URL for this + resource. 
+ + This field is a member of `oneof`_ ``_self_link``. + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["BackendBucket"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="BackendBucket", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class BackendBucketParams(proto.Message): r"""Additional Backend Bucket parameters. @@ -18871,6 +19378,35 @@ class BackendBucketUsedBy(proto.Message): ) +class BackendBucketsScopedList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_buckets (MutableSequence[google.cloud.compute_v1.types.BackendBucket]): + A list of BackendBuckets contained in this + scope. + warning (google.cloud.compute_v1.types.Warning): + Informational warning which replaces the list + of backend services when the list is empty. + + This field is a member of `oneof`_ ``_warning``. + """ + + backend_buckets: MutableSequence["BackendBucket"] = proto.RepeatedField( + proto.MESSAGE, + number=158780702, + message="BackendBucket", + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class BackendCustomMetric(proto.Message): r"""Custom Metrics are used for CUSTOM_METRICS balancing_mode. 
@@ -19010,7 +19546,7 @@ class BackendService(proto.Message): :: - - A regional backend service with the service_protocol set to HTTP, + - A regional backend service with the service protocol set to HTTP, HTTPS, HTTP2 or H2C, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the @@ -19274,7 +19810,7 @@ class BackendService(proto.Message): metrics to use for computing the weights are specified via thecustomMetrics field. This field is applicable to either: - - A regional backend service with the service_protocol set to HTTP, + - A regional backend service with the service protocol set to HTTP, HTTPS, HTTP2 or H2C, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the @@ -19405,7 +19941,7 @@ class BackendService(proto.Message): - A global backend service with the loadBalancingScheme set to INTERNAL_SELF_MANAGED or EXTERNAL_MANAGED. - A regional backend - service with the serviceProtocol set to HTTP, HTTPS, HTTP2 or H2C, and + service with the service protocol set to HTTP, HTTPS, HTTP2 or H2C, and loadBalancingScheme set to INTERNAL_MANAGED or EXTERNAL_MANAGED. Not supported for Serverless NEGs. @@ -19747,7 +20283,7 @@ class LocalityLbPolicy(proto.Enum): metrics to use for computing the weights are specified via thecustomMetrics field. This field is applicable to either: - - A regional backend service with the service_protocol set to HTTP, + - A regional backend service with the service protocol set to HTTP, HTTPS, HTTP2 or H2C, and load_balancing_scheme set to INTERNAL_MANAGED. - A global backend service with the @@ -23046,6 +23582,16 @@ class BulkInsertDiskResource(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + instant_snapshot_group_parameters (google.cloud.compute_v1.types.InstantSnapshotGroupParameters): + The parameters for the instant snapshot + group. 
+ + This field is a member of `oneof`_ ``_instant_snapshot_group_parameters``. + snapshot_group_parameters (google.cloud.compute_v1.types.SnapshotGroupParameters): + The parameters for the snapshot group. The + usage of snapshot group feature is restricted. + + This field is a member of `oneof`_ ``_snapshot_group_parameters``. source_consistency_group_policy (str): The URL of the DiskConsistencyGroupPolicy for the group of disks to clone. This may be a full @@ -23064,6 +23610,18 @@ class BulkInsertDiskResource(proto.Message): This field is a member of `oneof`_ ``_source_consistency_group_policy``. """ + instant_snapshot_group_parameters: "InstantSnapshotGroupParameters" = proto.Field( + proto.MESSAGE, + number=17051623, + optional=True, + message="InstantSnapshotGroupParameters", + ) + snapshot_group_parameters: "SnapshotGroupParameters" = proto.Field( + proto.MESSAGE, + number=42252485, + optional=True, + message="SnapshotGroupParameters", + ) source_consistency_group_policy: str = proto.Field( proto.STRING, number=19616093, @@ -23761,6 +24319,392 @@ class CacheKeyPolicy(proto.Message): ) +class CachePolicy(proto.Message): + r"""Message containing CachePolicy configuration for URL Map's + Route Action. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + cache_bypass_request_header_names (MutableSequence[str]): + Bypass the cache when the specified request headers are + matched by name, e.g. Pragma or Authorization headers. + Values are case-insensitive. Up to 5 header names can be + specified. The cache is bypassed for all ``cacheMode`` + values. + cache_key_policy (google.cloud.compute_v1.types.CachePolicyCacheKeyPolicy): + The cache key configuration. 
If not + specified, the default behavior depends on the + backend type: for Backend Services, the complete + request URI is used; for Backend Buckets, the + request URI is used without the protocol or + host, and only query parameters known to Cloud + Storage are included. + + This field is a member of `oneof`_ ``_cache_key_policy``. + cache_mode (str): + Specifies the cache setting for all responses from this + route. If not specified, Cloud CDN uses ``CACHE_ALL_STATIC`` + mode. Check the CacheMode enum for the list of possible + values. + + This field is a member of `oneof`_ ``_cache_mode``. + client_ttl (google.cloud.compute_v1.types.Duration): + Specifies a separate client (e.g. browser client) maximum + TTL for cached content. This is used to clamp the max-age + (or Expires) value sent to the client. With + ``FORCE_CACHE_ALL``, the lesser of ``clientTtl`` and + ``defaultTtl`` is used for the response max-age directive, + along with a "public" directive. For cacheable content in + ``CACHE_ALL_STATIC`` mode, ``clientTtl`` clamps the max-age + from the origin (if specified), or else sets the response + max-age directive to the lesser of the ``clientTtl`` and + ``defaultTtl``, and also ensures a "public" cache-control + directive is present. The maximum allowed value is + 31,622,400s (1 year). If not specified, Cloud CDN uses 3600s + (1 hour) for ``CACHE_ALL_STATIC`` mode. Cannot exceed + ``maxTtl``. Cannot be specified when ``cacheMode`` is + ``USE_ORIGIN_HEADERS``. + + This field is a member of `oneof`_ ``_client_ttl``. + default_ttl (google.cloud.compute_v1.types.Duration): + Specifies the default TTL for cached content for responses + that do not have an existing valid TTL (max-age or + s-maxage). Setting a TTL of "0" means "always revalidate". + The value of ``defaultTtl`` cannot be set to a value greater + than that of ``maxTtl``. When the ``cacheMode`` is set to + ``FORCE_CACHE_ALL``, the ``defaultTtl`` will overwrite the + TTL set in all responses. 
The maximum allowed value is + 31,622,400s (1 year). Infrequently accessed objects may be + evicted from the cache before the defined TTL. If not + specified, Cloud CDN uses 3600s (1 hour) for + ``CACHE_ALL_STATIC`` and ``FORCE_CACHE_ALL`` modes. Cannot + be specified when ``cacheMode`` is ``USE_ORIGIN_HEADERS``. + + This field is a member of `oneof`_ ``_default_ttl``. + max_ttl (google.cloud.compute_v1.types.Duration): + Specifies the maximum allowed TTL for cached content. Cache + directives that attempt to set a max-age or s-maxage higher + than this, or an Expires header more than ``maxTtl`` seconds + in the future will be capped at the value of ``maxTtl``, as + if it were the value of an s-maxage Cache-Control directive. + Headers sent to the client will not be modified. Setting a + TTL of "0" means "always revalidate". The maximum allowed + value is 31,622,400s (1 year). Infrequently accessed objects + may be evicted from the cache before the defined TTL. If not + specified, Cloud CDN uses 86400s (1 day) for + ``CACHE_ALL_STATIC`` mode. Can be specified only for + ``CACHE_ALL_STATIC`` cache mode. + + This field is a member of `oneof`_ ``_max_ttl``. + negative_caching (bool): + Negative caching allows per-status code TTLs to be set, in + order to apply fine-grained caching for common errors or + redirects. This can reduce the load on your origin and + improve end-user experience by reducing response latency. + When the ``cacheMode`` is set to ``CACHE_ALL_STATIC`` or + ``USE_ORIGIN_HEADERS``, negative caching applies to + responses with the specified response code that lack any + Cache-Control, Expires, or Pragma: no-cache directives. When + the ``cacheMode`` is set to ``FORCE_CACHE_ALL``, negative + caching applies to all responses with the specified response + code, and overrides any caching headers. 
By default, Cloud + CDN applies the following TTLs to these HTTP status codes: + + - 300 (Multiple Choice), 301, 308 (Permanent Redirects): 10m + - 404 (Not Found), 410 (Gone), 451 (Unavailable For Legal + Reasons): 120s + - 405 (Method Not Found), 501 (Not Implemented): 60s + + These defaults can be overridden in + ``negativeCachingPolicy``. If not specified, Cloud CDN + applies negative caching by default. + + This field is a member of `oneof`_ ``_negative_caching``. + negative_caching_policy (MutableSequence[google.cloud.compute_v1.types.CachePolicyNegativeCachingPolicy]): + Sets a cache TTL for the specified HTTP status code. + ``negativeCaching`` must be enabled to configure + ``negativeCachingPolicy``. Omitting the policy and leaving + ``negativeCaching`` enabled will use Cloud CDN's default + cache TTLs. Note that when specifying an explicit + ``negativeCachingPolicy``, you should take care to specify a + cache TTL for all response codes that you wish to cache. + Cloud CDN will not apply any default negative caching when a + policy exists. + request_coalescing (bool): + If true then Cloud CDN will combine multiple + concurrent cache fill requests into a small + number of requests to the origin. If not + specified, Cloud CDN applies request coalescing + by default. + + This field is a member of `oneof`_ ``_request_coalescing``. + serve_while_stale (google.cloud.compute_v1.types.Duration): + Serve existing content from the cache (if + available) when revalidating content with the + origin, or when an error is encountered when + refreshing the cache. + This setting defines the default "max-stale" + duration for any cached responses that do not + specify a max-stale directive. Stale responses + that exceed the TTL configured here will not be + served. The default limit (max-stale) is 86400s + (1 day), which will allow stale content to be + served up to this limit beyond the max-age (or + s-maxage) of a cached response. 
+ The maximum allowed value is 604800 (1 week). + Set this to zero (0) to disable + serve-while-stale. + + This field is a member of `oneof`_ ``_serve_while_stale``. + """ + + class CacheMode(proto.Enum): + r"""Specifies the cache setting for all responses from this route. If + not specified, Cloud CDN uses ``CACHE_ALL_STATIC`` mode. + + Values: + UNDEFINED_CACHE_MODE (0): + A value indicating that the enum field is not + set. + CACHE_ALL_STATIC (355027945): + Automatically cache static content, including + common image formats, media (video and audio), + and web assets (JavaScript and CSS). Requests + and responses that are marked as uncacheable, as + well as dynamic content (including HTML), will + not be cached. + FORCE_CACHE_ALL (486026928): + Cache all content, ignoring any "private", + "no-store" or "no-cache" directives in + Cache-Control response headers. Warning: this + may result in Cloud CDN caching private, + per-user (user identifiable) content. + USE_ORIGIN_HEADERS (55380261): + Requires the origin to set valid caching + headers to cache content. Responses without + these headers will not be cached at the edge, + and will require a full trip to the origin on + every request, potentially impacting performance + and increasing load on the origin server. 
+ """ + + UNDEFINED_CACHE_MODE = 0 + CACHE_ALL_STATIC = 355027945 + FORCE_CACHE_ALL = 486026928 + USE_ORIGIN_HEADERS = 55380261 + + cache_bypass_request_header_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=197581440, + ) + cache_key_policy: "CachePolicyCacheKeyPolicy" = proto.Field( + proto.MESSAGE, + number=159263727, + optional=True, + message="CachePolicyCacheKeyPolicy", + ) + cache_mode: str = proto.Field( + proto.STRING, + number=28877888, + optional=True, + ) + client_ttl: "Duration" = proto.Field( + proto.MESSAGE, + number=29034360, + optional=True, + message="Duration", + ) + default_ttl: "Duration" = proto.Field( + proto.MESSAGE, + number=100253422, + optional=True, + message="Duration", + ) + max_ttl: "Duration" = proto.Field( + proto.MESSAGE, + number=307578001, + optional=True, + message="Duration", + ) + negative_caching: bool = proto.Field( + proto.BOOL, + number=336110005, + optional=True, + ) + negative_caching_policy: MutableSequence["CachePolicyNegativeCachingPolicy"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=155359996, + message="CachePolicyNegativeCachingPolicy", + ) + ) + request_coalescing: bool = proto.Field( + proto.BOOL, + number=532808276, + optional=True, + ) + serve_while_stale: "Duration" = proto.Field( + proto.MESSAGE, + number=236682203, + optional=True, + message="Duration", + ) + + +class CachePolicyCacheKeyPolicy(proto.Message): + r"""Message containing what to include in the cache key for a + request for Cache Policy defined on Route Action. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + excluded_query_parameters (MutableSequence[str]): + Names of query string parameters to exclude in cache keys. + All other parameters will be included. Either specify + ``excludedQueryParameters`` or ``includedQueryParameters``, + not both. '&' and '=' will be percent encoded and not + treated as delimiters. 
+ + Note: This field applies to routes that use backend + services. Attempting to set it on a route that points + exclusively to Backend Buckets will result in a + configuration error. For routes that point to a Backend + Bucket, use ``includedQueryParameters`` to define which + parameters should be part of the cache key. + include_host (bool): + If true, requests to different hosts will be + cached separately. + Note: This setting is only applicable to routes + that use a Backend Service. It does not affect + requests served by a Backend Bucket, as the host + is never included in a Backend Bucket's cache + key. Attempting to set it on a route that points + exclusively to Backend Buckets will result in a + configuration error. + + This field is a member of `oneof`_ ``_include_host``. + include_protocol (bool): + If true, http and https requests will be + cached separately. + Note: This setting is only applicable to routes + that use a Backend Service. It does not affect + requests served by a Backend Bucket, as the + protocol is never included in a Backend Bucket's + cache key. Attempting to set on a route that + points exclusively to Backend Buckets will + result in a configuration error. + + This field is a member of `oneof`_ ``_include_protocol``. + include_query_string (bool): + If true, include query string parameters in the cache key + according to ``includedQueryParameters`` and + ``excludedQueryParameters``. If neither is set, the entire + query string will be included. If false, the query string + will be excluded from the cache key entirely. + + Note: This field applies to routes that use backend + services. Attempting to set it on a route that points + exclusively to Backend Buckets will result in a + configuration error. For routes that point to a Backend + Bucket, use ``includedQueryParameters`` to define which + parameters should be part of the cache key. + + This field is a member of `oneof`_ ``_include_query_string``. 
+ included_cookie_names (MutableSequence[str]): + Allows HTTP cookies (by name) to be used in + the cache key. The name=value pair will be used + in the cache key Cloud CDN generates. + + Note: This setting is only applicable to routes + that use a Backend Service. It does not affect + requests served by a Backend Bucket. Attempting + to set it on a route that points exclusively to + Backend Buckets will result in a configuration + error. Up to 5 cookie names can be specified. + included_header_names (MutableSequence[str]): + Allows HTTP request headers (by name) to be + used in the cache key. + included_query_parameters (MutableSequence[str]): + Names of query string parameters to include in cache keys. + All other parameters will be excluded. Either specify + ``includedQueryParameters`` or ``excludedQueryParameters``, + not both. '&' and '=' will be percent encoded and not + treated as delimiters. + """ + + excluded_query_parameters: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=124896150, + ) + include_host: bool = proto.Field( + proto.BOOL, + number=486867679, + optional=True, + ) + include_protocol: bool = proto.Field( + proto.BOOL, + number=303507535, + optional=True, + ) + include_query_string: bool = proto.Field( + proto.BOOL, + number=474036639, + optional=True, + ) + included_cookie_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=66987088, + ) + included_header_names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=533756345, + ) + included_query_parameters: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2319076, + ) + + +class CachePolicyNegativeCachingPolicy(proto.Message): + r"""Specify CDN TTLs for response error codes. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + code (int): + The HTTP status code to define a TTL against. 
+ Only HTTP status codes 300, 301, 302, 307, 308, + 404, 405, 410, 421, 451 and 501 can be specified + as values, and you cannot specify a status code + more than once. + + This field is a member of `oneof`_ ``_code``. + ttl (google.cloud.compute_v1.types.Duration): + The TTL (in seconds) for which to cache + responses with the corresponding status code. + The maximum allowed value is 1800s (30 minutes). + Infrequently accessed objects may be evicted + from the cache before the defined TTL. + + This field is a member of `oneof`_ ``_ttl``. + """ + + code: int = proto.Field( + proto.INT32, + number=3059181, + optional=True, + ) + ttl: "Duration" = proto.Field( + proto.MESSAGE, + number=115180, + optional=True, + message="Duration", + ) + + class CalendarModeAdviceRequest(proto.Message): r"""A request to recommend the best way to consume the specified resources in the future. @@ -23985,6 +24929,76 @@ class CancelInstanceGroupManagerResizeRequestRequest(proto.Message): ) +class CancelRegionInstanceGroupManagerResizeRequestRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagerResizeRequests.Cancel. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + The name of the managed instance group. + Name should conform to RFC1035 or be a resource + ID. + project (str): + Project ID for this request. + region (str): + The name of the region + scoping this request. Name should conform to + RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resize_request (str): + The name of the resize request to cancel. + Name should conform to RFC1035 or be a resource + ID. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resize_request: str = proto.Field( + proto.STRING, + number=216941060, + ) + + class CircuitBreakers(proto.Message): r"""Settings controlling the volume of requests, connections and retries to this backend service. @@ -24354,6 +25368,12 @@ class Commitment(proto.Message): except the last character, which cannot be a dash. This field is a member of `oneof`_ ``_name``. + params (google.cloud.compute_v1.types.CommitmentParams): + Input only. Additional params passed with the + request, but not persisted as part of resource + payload. + + This field is a member of `oneof`_ ``_params``. plan (str): The minimum time duration that you commit to purchasing resources. 
The plan that you choose determines the preset @@ -24747,6 +25767,12 @@ class Type(proto.Enum): number=3373707, optional=True, ) + params: "CommitmentParams" = proto.Field( + proto.MESSAGE, + number=78313862, + optional=True, + message="CommitmentParams", + ) plan: str = proto.Field( proto.STRING, number=3443497, @@ -24963,6 +25989,28 @@ def raw_page(self): ) +class CommitmentParams(proto.Message): + r"""Additional commitment params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Input only. Resource manager tags to be bound to the + commitment. Tag keys and values have the same definition as + resource manager tags. Keys and values can be either in + numeric format, such as ``tagKeys/{tag_key_id}`` and + ``tagValues/{tag_value_id}`` or in namespaced format such as + ``{org_id|project_id}/{tag_key_short_name}`` and + ``{tag_value_short_name}``. The field is ignored (both PUT & + PATCH) when empty. + """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=377671164, + ) + + class CommitmentResourceStatus(proto.Message): r"""[Output Only] Contains output only fields. @@ -25246,6 +26294,68 @@ def raw_page(self): ) +class CompositeHealthCheckHealth(proto.Message): + r"""Response message for RegionCompositeHealthChecks.GetHealth + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_sources (MutableSequence[google.cloud.compute_v1.types.CompositeHealthChecksGetHealthResponseHealthSourceHealth]): + Health sources and their corresponding health + states. + health_state (str): + Health state of the CompositeHealthCheck. + Check the HealthState enum for the list of + possible values. + + This field is a member of `oneof`_ ``_health_state``. + kind (str): + Output only. [Output Only] Type of resource. + Alwayscompute#compositeHealthCheckHealth for the health of + composite health checks. 
+ + This field is a member of `oneof`_ ``_kind``. + """ + + class HealthState(proto.Enum): + r"""Health state of the CompositeHealthCheck. + + Values: + UNDEFINED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + HEALTHY (439801213): + No description available. + UNHEALTHY (462118084): + No description available. + UNKNOWN (433141802): + No description available. + """ + + UNDEFINED_HEALTH_STATE = 0 + HEALTHY = 439801213 + UNHEALTHY = 462118084 + UNKNOWN = 433141802 + + health_sources: MutableSequence[ + "CompositeHealthChecksGetHealthResponseHealthSourceHealth" + ] = proto.RepeatedField( + proto.MESSAGE, + number=397879509, + message="CompositeHealthChecksGetHealthResponseHealthSourceHealth", + ) + health_state: str = proto.Field( + proto.STRING, + number=324007150, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + + class CompositeHealthCheckList(proto.Message): r""" @@ -25321,6 +26431,57 @@ def raw_page(self): ) +class CompositeHealthChecksGetHealthResponseHealthSourceHealth(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_state (str): + Health state of the associated HealthSource + resource. Check the HealthState enum for the + list of possible values. + + This field is a member of `oneof`_ ``_health_state``. + source (str): + Fully qualified URL of the associated + HealthSource resource. + + This field is a member of `oneof`_ ``_source``. + """ + + class HealthState(proto.Enum): + r"""Health state of the associated HealthSource resource. + + Values: + UNDEFINED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + HEALTHY (439801213): + No description available. + UNHEALTHY (462118084): + No description available. + UNKNOWN (433141802): + No description available. 
+ """ + + UNDEFINED_HEALTH_STATE = 0 + HEALTHY = 439801213 + UNHEALTHY = 462118084 + UNKNOWN = 433141802 + + health_state: str = proto.Field( + proto.STRING, + number=324007150, + optional=True, + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + + class CompositeHealthChecksScopedList(proto.Message): r""" @@ -27970,15 +29131,15 @@ class DeleteInstancesRegionInstanceGroupManagerRequest(proto.Message): ) -class DeleteInstantSnapshotRequest(proto.Message): - r"""A request message for InstantSnapshots.Delete. See the method - description for details. +class DeleteInstantSnapshotGroupRequest(proto.Message): + r"""A request message for InstantSnapshotGroups.Delete. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - instant_snapshot (str): + instant_snapshot_group (str): Name of the InstantSnapshot resource to delete. project (str): @@ -28009,9 +29170,9 @@ class DeleteInstantSnapshotRequest(proto.Message): The name of the zone for this request. """ - instant_snapshot: str = proto.Field( + instant_snapshot_group: str = proto.Field( proto.STRING, - number=391638626, + number=223180386, ) project: str = proto.Field( proto.STRING, @@ -28028,70 +29189,128 @@ class DeleteInstantSnapshotRequest(proto.Message): ) -class DeleteInterconnectAttachmentGroupRequest(proto.Message): - r"""A request message for InterconnectAttachmentGroups.Delete. - See the method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - interconnect_attachment_group (str): - Name of the InterconnectAttachmentGroup - resource to delete. - project (str): - Project ID for this request. - request_id (str): - An optional request ID to identify requests. 
Specify a - unique request ID so that if you must retry your request, - the server will know to ignore the request if it has already - been completed. - - For example, consider a situation where you make an initial - request and the request times out. If you make the request - again with the same request ID, the server can check if - original operation with the same request ID was received, - and if so, will ignore the second request. This prevents - clients from accidentally creating duplicate commitments. - - The request ID must be a valid UUID with the exception that - zero UUID is not supported - (00000000-0000-0000-0000-000000000000). end_interface: - MixerMutationRequestBuilder - - This field is a member of `oneof`_ ``_request_id``. - """ - - interconnect_attachment_group: str = proto.Field( - proto.STRING, - number=75905012, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - - -class DeleteInterconnectAttachmentRequest(proto.Message): - r"""A request message for InterconnectAttachments.Delete. See the - method description for details. +class DeleteInstantSnapshotRequest(proto.Message): + r"""A request message for InstantSnapshots.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - interconnect_attachment (str): - Name of the interconnect attachment to + instant_snapshot (str): + Name of the InstantSnapshot resource to delete. project (str): Project ID for this request. - region (str): - Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
+ + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + instant_snapshot: str = proto.Field( + proto.STRING, + number=391638626, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteInterconnectAttachmentGroupRequest(proto.Message): + r"""A request message for InterconnectAttachmentGroups.Delete. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_attachment_group (str): + Name of the InterconnectAttachmentGroup + resource to delete. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. + + For example, consider a situation where you make an initial + request and the request times out. If you make the request + again with the same request ID, the server can check if + original operation with the same request ID was received, + and if so, will ignore the second request. This prevents + clients from accidentally creating duplicate commitments. 
+ + The request ID must be a valid UUID with the exception that + zero UUID is not supported + (00000000-0000-0000-0000-000000000000). end_interface: + MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. + """ + + interconnect_attachment_group: str = proto.Field( + proto.STRING, + number=75905012, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class DeleteInterconnectAttachmentRequest(proto.Message): + r"""A request message for InterconnectAttachments.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + interconnect_attachment (str): + Name of the interconnect attachment to + delete. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -29205,48 +30424,44 @@ class DeleteRegionAutoscalerRequest(proto.Message): ) -class DeleteRegionBackendServiceRequest(proto.Message): - r"""A request message for RegionBackendServices.Delete. See the +class DeleteRegionBackendBucketRequest(proto.Message): + r"""A request message for RegionBackendBuckets.Delete. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - backend_service (str): - Name of the BackendService resource to - delete. + backend_bucket (str): + Name of the BackendBucket resource to delete. project (str): Project ID for this request. region (str): Name of the region scoping this request. request_id (str): - An optional request ID to identify requests. 
- Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. + An optional request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. + For example, consider a situation where you make an initial + request and the request times out. If you make the request + again with the same request ID, the server can check if + original operation with the same request ID was received, + and if so, will ignore the second request. This prevents + clients from accidentally creating duplicate commitments. - The request ID must be - a valid UUID with the exception that zero UUID - is not supported - (00000000-0000-0000-0000-000000000000). + The request ID must be a valid UUID with the exception that + zero UUID is not supported + (00000000-0000-0000-0000-000000000000). end_interface: + MixerMutationRequestBuilder This field is a member of `oneof`_ ``_request_id``. """ - backend_service: str = proto.Field( + backend_bucket: str = proto.Field( proto.STRING, - number=306946058, + number=91714037, ) project: str = proto.Field( proto.STRING, @@ -29263,16 +30478,16 @@ class DeleteRegionBackendServiceRequest(proto.Message): ) -class DeleteRegionCompositeHealthCheckRequest(proto.Message): - r"""A request message for RegionCompositeHealthChecks.Delete. See - the method description for details. +class DeleteRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.Delete. 
See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - composite_health_check (str): - Name of the CompositeHealthCheck resource to + backend_service (str): + Name of the BackendService resource to delete. project (str): Project ID for this request. @@ -29302,9 +30517,9 @@ class DeleteRegionCompositeHealthCheckRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - composite_health_check: str = proto.Field( + backend_service: str = proto.Field( proto.STRING, - number=466984989, + number=306946058, ) project: str = proto.Field( proto.STRING, @@ -29321,77 +30536,19 @@ class DeleteRegionCompositeHealthCheckRequest(proto.Message): ) -class DeleteRegionDiskRequest(proto.Message): - r"""A request message for RegionDisks.Delete. See the method - description for details. +class DeleteRegionCompositeHealthCheckRequest(proto.Message): + r"""A request message for RegionCompositeHealthChecks.Delete. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - disk (str): - Name of the regional persistent disk to + composite_health_check (str): + Name of the CompositeHealthCheck resource to delete. project (str): Project ID for this request. - region (str): - Name of the region for this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. 
This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be - a valid UUID with the exception that zero UUID - is not supported - (00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. - """ - - disk: str = proto.Field( - proto.STRING, - number=3083677, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - - -class DeleteRegionHealthAggregationPolicyRequest(proto.Message): - r"""A request message for RegionHealthAggregationPolicies.Delete. - See the method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - health_aggregation_policy (str): - Name of the HealthAggregationPolicy resource - to delete. - project (str): - Project ID for this request. region (str): Name of the region scoping this request. request_id (str): @@ -29418,9 +30575,9 @@ class DeleteRegionHealthAggregationPolicyRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - health_aggregation_policy: str = proto.Field( + composite_health_check: str = proto.Field( proto.STRING, - number=240314354, + number=466984989, ) project: str = proto.Field( proto.STRING, @@ -29437,20 +30594,21 @@ class DeleteRegionHealthAggregationPolicyRequest(proto.Message): ) -class DeleteRegionHealthCheckRequest(proto.Message): - r"""A request message for RegionHealthChecks.Delete. See the - method description for details. +class DeleteRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - health_check (str): - Name of the HealthCheck resource to delete. 
+ disk (str): + Name of the regional persistent disk to + delete. project (str): Project ID for this request. region (str): - Name of the region scoping this request. + Name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -29475,9 +30633,9 @@ class DeleteRegionHealthCheckRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - health_check: str = proto.Field( + disk: str = proto.Field( proto.STRING, - number=308876645, + number=3083677, ) project: str = proto.Field( proto.STRING, @@ -29494,18 +30652,17 @@ class DeleteRegionHealthCheckRequest(proto.Message): ) -class DeleteRegionHealthCheckServiceRequest(proto.Message): - r"""A request message for RegionHealthCheckServices.Delete. See - the method description for details. +class DeleteRegionHealthAggregationPolicyRequest(proto.Message): + r"""A request message for RegionHealthAggregationPolicies.Delete. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - health_check_service (str): - Name of the HealthCheckService to delete. The - name must be 1-63 characters long, and comply - with RFC1035. + health_aggregation_policy (str): + Name of the HealthAggregationPolicy resource + to delete. project (str): Project ID for this request. region (str): @@ -29534,9 +30691,9 @@ class DeleteRegionHealthCheckServiceRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - health_check_service: str = proto.Field( + health_aggregation_policy: str = proto.Field( proto.STRING, - number=408374747, + number=240314354, ) project: str = proto.Field( proto.STRING, @@ -29553,16 +30710,16 @@ class DeleteRegionHealthCheckServiceRequest(proto.Message): ) -class DeleteRegionHealthSourceRequest(proto.Message): - r"""A request message for RegionHealthSources.Delete. 
See the +class DeleteRegionHealthCheckRequest(proto.Message): + r"""A request message for RegionHealthChecks.Delete. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - health_source (str): - Name of the HealthSource resource to delete. + health_check (str): + Name of the HealthCheck resource to delete. project (str): Project ID for this request. region (str): @@ -29591,9 +30748,9 @@ class DeleteRegionHealthSourceRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - health_source: str = proto.Field( + health_check: str = proto.Field( proto.STRING, - number=376521566, + number=308876645, ) project: str = proto.Field( proto.STRING, @@ -29610,16 +30767,18 @@ class DeleteRegionHealthSourceRequest(proto.Message): ) -class DeleteRegionInstanceGroupManagerRequest(proto.Message): - r"""A request message for RegionInstanceGroupManagers.Delete. See +class DeleteRegionHealthCheckServiceRequest(proto.Message): + r"""A request message for RegionHealthCheckServices.Delete. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - instance_group_manager (str): - Name of the managed instance group to delete. + health_check_service (str): + Name of the HealthCheckService to delete. The + name must be 1-63 characters long, and comply + with RFC1035. project (str): Project ID for this request. region (str): @@ -29648,9 +30807,9 @@ class DeleteRegionInstanceGroupManagerRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. 
""" - instance_group_manager: str = proto.Field( + health_check_service: str = proto.Field( proto.STRING, - number=249363395, + number=408374747, ) project: str = proto.Field( proto.STRING, @@ -29667,20 +30826,20 @@ class DeleteRegionInstanceGroupManagerRequest(proto.Message): ) -class DeleteRegionInstanceTemplateRequest(proto.Message): - r"""A request message for RegionInstanceTemplates.Delete. See the +class DeleteRegionHealthSourceRequest(proto.Message): + r"""A request message for RegionHealthSources.Delete. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - instance_template (str): - The name of the instance template to delete. + health_source (str): + Name of the HealthSource resource to delete. project (str): Project ID for this request. region (str): - The name of the region for this request. + Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -29705,9 +30864,9 @@ class DeleteRegionInstanceTemplateRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - instance_template: str = proto.Field( + health_source: str = proto.Field( proto.STRING, - number=309248228, + number=376521566, ) project: str = proto.Field( proto.STRING, @@ -29724,21 +30883,20 @@ class DeleteRegionInstanceTemplateRequest(proto.Message): ) -class DeleteRegionInstantSnapshotRequest(proto.Message): - r"""A request message for RegionInstantSnapshots.Delete. See the - method description for details. +class DeleteRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.Delete. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - instant_snapshot (str): - Name of the InstantSnapshot resource to - delete. 
+ instance_group_manager (str): + Name of the managed instance group to delete. project (str): Project ID for this request. region (str): - The name of the region for this request. + Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -29763,9 +30921,9 @@ class DeleteRegionInstantSnapshotRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - instant_snapshot: str = proto.Field( + instance_group_manager: str = proto.Field( proto.STRING, - number=391638626, + number=249363395, ) project: str = proto.Field( proto.STRING, @@ -29782,23 +30940,25 @@ class DeleteRegionInstantSnapshotRequest(proto.Message): ) -class DeleteRegionNetworkEndpointGroupRequest(proto.Message): - r"""A request message for RegionNetworkEndpointGroups.Delete. See - the method description for details. +class DeleteRegionInstanceGroupManagerResizeRequestRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagerResizeRequests.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - network_endpoint_group (str): - The name of the network endpoint group to - delete. It should comply with RFC1035. + instance_group_manager (str): + The name of the managed instance group. + Name should conform to RFC1035 or be a resource + ID. project (str): Project ID for this request. region (str): - The name of the region where - the network endpoint group is located. It should - comply with RFC1035. + The name of the region + scoping this request. Name should conform to + RFC1035. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -29821,11 +30981,15 @@ class DeleteRegionNetworkEndpointGroupRequest(proto.Message): (00000000-0000-0000-0000-000000000000). 
This field is a member of `oneof`_ ``_request_id``. + resize_request (str): + The name of the resize request to delete. + Name should conform to RFC1035 or be a resource + ID. """ - network_endpoint_group: str = proto.Field( + instance_group_manager: str = proto.Field( proto.STRING, - number=433907078, + number=249363395, ) project: str = proto.Field( proto.STRING, @@ -29840,22 +31004,26 @@ class DeleteRegionNetworkEndpointGroupRequest(proto.Message): number=37109963, optional=True, ) + resize_request: str = proto.Field( + proto.STRING, + number=216941060, + ) -class DeleteRegionNetworkFirewallPolicyRequest(proto.Message): - r"""A request message for RegionNetworkFirewallPolicies.Delete. - See the method description for details. +class DeleteRegionInstanceTemplateRequest(proto.Message): + r"""A request message for RegionInstanceTemplates.Delete. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - firewall_policy (str): - Name of the firewall policy to delete. + instance_template (str): + The name of the instance template to delete. project (str): Project ID for this request. region (str): - Name of the region scoping this request. + The name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -29880,9 +31048,9 @@ class DeleteRegionNetworkFirewallPolicyRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - firewall_policy: str = proto.Field( + instance_template: str = proto.Field( proto.STRING, - number=498173265, + number=309248228, ) project: str = proto.Field( proto.STRING, @@ -29899,21 +31067,21 @@ class DeleteRegionNetworkFirewallPolicyRequest(proto.Message): ) -class DeleteRegionNotificationEndpointRequest(proto.Message): - r"""A request message for RegionNotificationEndpoints.Delete. 
See +class DeleteRegionInstantSnapshotGroupRequest(proto.Message): + r"""A request message for RegionInstantSnapshotGroups.Delete. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - notification_endpoint (str): - Name of the NotificationEndpoint resource to + instant_snapshot_group (str): + Name of the InstantSnapshotGroup resource to delete. project (str): Project ID for this request. region (str): - Name of the region scoping this request. + The name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -29938,9 +31106,9 @@ class DeleteRegionNotificationEndpointRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - notification_endpoint: str = proto.Field( + instant_snapshot_group: str = proto.Field( proto.STRING, - number=376807017, + number=223180386, ) project: str = proto.Field( proto.STRING, @@ -29957,23 +31125,48 @@ class DeleteRegionNotificationEndpointRequest(proto.Message): ) -class DeleteRegionOperationRequest(proto.Message): - r"""A request message for RegionOperations.Delete. See the method - description for details. +class DeleteRegionInstantSnapshotRequest(proto.Message): + r"""A request message for RegionInstantSnapshots.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - operation (str): - Name of the Operations resource to delete, or - its unique numeric identifier. + instant_snapshot (str): + Name of the InstantSnapshot resource to + delete. project (str): Project ID for this request. region (str): - Name of the region for this request. + The name of the region for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. """ - operation: str = proto.Field( + instant_snapshot: str = proto.Field( proto.STRING, - number=52090215, + number=391638626, ) project: str = proto.Field( proto.STRING, @@ -29983,27 +31176,30 @@ class DeleteRegionOperationRequest(proto.Message): proto.STRING, number=138946292, ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) -class DeleteRegionOperationResponse(proto.Message): - r"""A response message for RegionOperations.Delete. See the - method description for details. - - """ - - -class DeleteRegionSecurityPolicyRequest(proto.Message): - r"""A request message for RegionSecurityPolicies.Delete. See the - method description for details. +class DeleteRegionNetworkEndpointGroupRequest(proto.Message): + r"""A request message for RegionNetworkEndpointGroups.Delete. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + network_endpoint_group (str): + The name of the network endpoint group to + delete. It should comply with RFC1035. project (str): Project ID for this request. region (str): - Name of the region scoping this request. + The name of the region where + the network endpoint group is located. 
It should + comply with RFC1035. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30026,10 +31222,12 @@ class DeleteRegionSecurityPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - security_policy (str): - Name of the security policy to delete. """ + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) project: str = proto.Field( proto.STRING, number=227560217, @@ -30043,20 +31241,18 @@ class DeleteRegionSecurityPolicyRequest(proto.Message): number=37109963, optional=True, ) - security_policy: str = proto.Field( - proto.STRING, - number=171082513, - ) -class DeleteRegionSslCertificateRequest(proto.Message): - r"""A request message for RegionSslCertificates.Delete. See the - method description for details. +class DeleteRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.Delete. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + firewall_policy (str): + Name of the firewall policy to delete. project (str): Project ID for this request. region (str): @@ -30083,11 +31279,12 @@ class DeleteRegionSslCertificateRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - ssl_certificate (str): - Name of the SslCertificate resource to - delete. """ + firewall_policy: str = proto.Field( + proto.STRING, + number=498173265, + ) project: str = proto.Field( proto.STRING, number=227560217, @@ -30101,20 +31298,19 @@ class DeleteRegionSslCertificateRequest(proto.Message): number=37109963, optional=True, ) - ssl_certificate: str = proto.Field( - proto.STRING, - number=46443492, - ) -class DeleteRegionSslPolicyRequest(proto.Message): - r"""A request message for RegionSslPolicies.Delete. 
See the - method description for details. +class DeleteRegionNotificationEndpointRequest(proto.Message): + r"""A request message for RegionNotificationEndpoints.Delete. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + notification_endpoint (str): + Name of the NotificationEndpoint resource to + delete. project (str): Project ID for this request. region (str): @@ -30141,12 +31337,12 @@ class DeleteRegionSslPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - ssl_policy (str): - Name of the SSL policy to delete. The name - must be 1-63 characters long, and comply with - RFC1035. """ + notification_endpoint: str = proto.Field( + proto.STRING, + number=376807017, + ) project: str = proto.Field( proto.STRING, number=227560217, @@ -30160,14 +31356,45 @@ class DeleteRegionSslPolicyRequest(proto.Message): number=37109963, optional=True, ) - ssl_policy: str = proto.Field( + + +class DeleteRegionOperationRequest(proto.Message): + r"""A request message for RegionOperations.Delete. See the method + description for details. + + Attributes: + operation (str): + Name of the Operations resource to delete, or + its unique numeric identifier. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + operation: str = proto.Field( proto.STRING, - number=295190213, + number=52090215, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, ) -class DeleteRegionTargetHttpProxyRequest(proto.Message): - r"""A request message for RegionTargetHttpProxies.Delete. See the +class DeleteRegionOperationResponse(proto.Message): + r"""A response message for RegionOperations.Delete. See the + method description for details. 
+ + """ + + +class DeleteRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.Delete. See the method description for details. @@ -30200,9 +31427,8 @@ class DeleteRegionTargetHttpProxyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_http_proxy (str): - Name of the TargetHttpProxy resource to - delete. + security_policy (str): + Name of the security policy to delete. """ project: str = proto.Field( @@ -30218,15 +31444,15 @@ class DeleteRegionTargetHttpProxyRequest(proto.Message): number=37109963, optional=True, ) - target_http_proxy: str = proto.Field( + security_policy: str = proto.Field( proto.STRING, - number=206872421, + number=171082513, ) -class DeleteRegionTargetHttpsProxyRequest(proto.Message): - r"""A request message for RegionTargetHttpsProxies.Delete. See - the method description for details. +class DeleteRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -30235,7 +31461,7 @@ class DeleteRegionTargetHttpsProxyRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + The name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30258,9 +31484,8 @@ class DeleteRegionTargetHttpsProxyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_https_proxy (str): - Name of the TargetHttpsProxy resource to - delete. + snapshot (str): + Name of the snapshot resource to delete. 
""" project: str = proto.Field( @@ -30276,14 +31501,14 @@ class DeleteRegionTargetHttpsProxyRequest(proto.Message): number=37109963, optional=True, ) - target_https_proxy: str = proto.Field( + snapshot: str = proto.Field( proto.STRING, - number=52336748, + number=284874180, ) -class DeleteRegionTargetTcpProxyRequest(proto.Message): - r"""A request message for RegionTargetTcpProxies.Delete. See the +class DeleteRegionSslCertificateRequest(proto.Message): + r"""A request message for RegionSslCertificates.Delete. See the method description for details. @@ -30316,8 +31541,8 @@ class DeleteRegionTargetTcpProxyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_tcp_proxy (str): - Name of the TargetTcpProxy resource to + ssl_certificate (str): + Name of the SslCertificate resource to delete. """ @@ -30334,15 +31559,15 @@ class DeleteRegionTargetTcpProxyRequest(proto.Message): number=37109963, optional=True, ) - target_tcp_proxy: str = proto.Field( + ssl_certificate: str = proto.Field( proto.STRING, - number=503065442, + number=46443492, ) -class DeleteRegionUrlMapRequest(proto.Message): - r"""A request message for RegionUrlMaps.Delete. See the method - description for details. +class DeleteRegionSslPolicyRequest(proto.Message): + r"""A request message for RegionSslPolicies.Delete. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -30353,12 +31578,31 @@ class DeleteRegionUrlMapRequest(proto.Message): region (str): Name of the region scoping this request. request_id (str): - begin_interface: MixerMutationRequestBuilder Request ID to - support idempotency. + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. 
+ + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - url_map (str): - Name of the UrlMap resource to delete. + ssl_policy (str): + Name of the SSL policy to delete. The name + must be 1-63 characters long, and comply with + RFC1035. """ project: str = proto.Field( @@ -30374,15 +31618,15 @@ class DeleteRegionUrlMapRequest(proto.Message): number=37109963, optional=True, ) - url_map: str = proto.Field( + ssl_policy: str = proto.Field( proto.STRING, - number=367020684, + number=295190213, ) -class DeleteReservationRequest(proto.Message): - r"""A request message for Reservations.Delete. See the method - description for details. +class DeleteRegionTargetHttpProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpProxies.Delete. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -30390,6 +31634,8 @@ class DeleteReservationRequest(proto.Message): Attributes: project (str): Project ID for this request. + region (str): + Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30412,34 +31658,33 @@ class DeleteReservationRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - reservation (str): - Name of the reservation to delete. - zone (str): - Name of the zone for this request. 
+ target_http_proxy (str): + Name of the TargetHttpProxy resource to + delete. """ project: str = proto.Field( proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - reservation: str = proto.Field( - proto.STRING, - number=47530956, - ) - zone: str = proto.Field( + target_http_proxy: str = proto.Field( proto.STRING, - number=3744684, + number=206872421, ) -class DeleteResourcePolicyRequest(proto.Message): - r"""A request message for ResourcePolicies.Delete. See the method - description for details. +class DeleteRegionTargetHttpsProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpsProxies.Delete. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -30448,7 +31693,7 @@ class DeleteResourcePolicyRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region for this request. + Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30471,8 +31716,9 @@ class DeleteResourcePolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - resource_policy (str): - Name of the resource policy to delete. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to + delete. """ project: str = proto.Field( @@ -30488,29 +31734,24 @@ class DeleteResourcePolicyRequest(proto.Message): number=37109963, optional=True, ) - resource_policy: str = proto.Field( + target_https_proxy: str = proto.Field( proto.STRING, - number=159240835, + number=52336748, ) -class DeleteRoutePolicyRouterRequest(proto.Message): - r"""A request message for Routers.DeleteRoutePolicy. 
See the +class DeleteRegionTargetTcpProxyRequest(proto.Message): + r"""A request message for RegionTargetTcpProxies.Delete. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - policy (str): - The Policy name for this request. Name must - conform to RFC1035 - - This field is a member of `oneof`_ ``_policy``. project (str): Project ID for this request. region (str): - Name of the region for this request. + Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30533,16 +31774,11 @@ class DeleteRoutePolicyRouterRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - router (str): - Name of the Router resource where Route - Policy is defined. + target_tcp_proxy (str): + Name of the TargetTcpProxy resource to + delete. """ - policy: str = proto.Field( - proto.STRING, - number=91071794, - optional=True, - ) project: str = proto.Field( proto.STRING, number=227560217, @@ -30556,14 +31792,14 @@ class DeleteRoutePolicyRouterRequest(proto.Message): number=37109963, optional=True, ) - router: str = proto.Field( + target_tcp_proxy: str = proto.Field( proto.STRING, - number=148608841, + number=503065442, ) -class DeleteRouteRequest(proto.Message): - r"""A request message for Routes.Delete. See the method +class DeleteRegionUrlMapRequest(proto.Message): + r"""A request message for RegionUrlMaps.Delete. See the method description for details. @@ -30572,49 +31808,38 @@ class DeleteRouteRequest(proto.Message): Attributes: project (str): Project ID for this request. + region (str): + Name of the region scoping this request. request_id (str): - An optional request ID to identify requests. 
- Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be - a valid UUID with the exception that zero UUID - is not supported - (00000000-0000-0000-0000-000000000000). + begin_interface: MixerMutationRequestBuilder Request ID to + support idempotency. This field is a member of `oneof`_ ``_request_id``. - route (str): - Name of the Route resource to delete. + url_map (str): + Name of the UrlMap resource to delete. """ project: str = proto.Field( proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - route: str = proto.Field( + url_map: str = proto.Field( proto.STRING, - number=108704329, + number=367020684, ) -class DeleteRouterRequest(proto.Message): - r"""A request message for Routers.Delete. See the method +class DeleteReservationRequest(proto.Message): + r"""A request message for Reservations.Delete. See the method description for details. @@ -30623,8 +31848,6 @@ class DeleteRouterRequest(proto.Message): Attributes: project (str): Project ID for this request. - region (str): - Name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30647,31 +31870,33 @@ class DeleteRouterRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - router (str): - Name of the Router resource to delete. 
+ reservation (str): + Name of the reservation to delete. + zone (str): + Name of the zone for this request. """ project: str = proto.Field( proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - router: str = proto.Field( + reservation: str = proto.Field( proto.STRING, - number=148608841, + number=47530956, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, ) -class DeleteSecurityPolicyRequest(proto.Message): - r"""A request message for SecurityPolicies.Delete. See the method +class DeleteResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.Delete. See the method description for details. @@ -30680,6 +31905,8 @@ class DeleteSecurityPolicyRequest(proto.Message): Attributes: project (str): Project ID for this request. + region (str): + Name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30702,37 +31929,46 @@ class DeleteSecurityPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - security_policy (str): - Name of the security policy to delete. + resource_policy (str): + Name of the resource policy to delete. """ project: str = proto.Field( proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - security_policy: str = proto.Field( + resource_policy: str = proto.Field( proto.STRING, - number=171082513, + number=159240835, ) -class DeleteServiceAttachmentRequest(proto.Message): - r"""A request message for ServiceAttachments.Delete. See the +class DeleteRoutePolicyRouterRequest(proto.Message): + r"""A request message for Routers.DeleteRoutePolicy. See the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + policy (str): + The Policy name for this request. Name must + conform to RFC1035 + + This field is a member of `oneof`_ ``_policy``. project (str): Project ID for this request. region (str): - Name of the region of this request. + Name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30755,11 +31991,16 @@ class DeleteServiceAttachmentRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - service_attachment (str): - Name of the ServiceAttachment resource to - delete. + router (str): + Name of the Router resource where Route + Policy is defined. """ + policy: str = proto.Field( + proto.STRING, + number=91071794, + optional=True, + ) project: str = proto.Field( proto.STRING, number=227560217, @@ -30773,26 +32014,20 @@ class DeleteServiceAttachmentRequest(proto.Message): number=37109963, optional=True, ) - service_attachment: str = proto.Field( + router: str = proto.Field( proto.STRING, - number=338957549, + number=148608841, ) -class DeleteSignedUrlKeyBackendBucketRequest(proto.Message): - r"""A request message for BackendBuckets.DeleteSignedUrlKey. See - the method description for details. +class DeleteRouteRequest(proto.Message): + r"""A request message for Routes.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - backend_bucket (str): - Name of the BackendBucket resource to which - the Signed URL Key should be added. The name - should conform to RFC1035. - key_name (str): - The name of the Signed URL Key to delete. project (str): Project ID for this request. 
request_id (str): @@ -30817,16 +32052,10 @@ class DeleteSignedUrlKeyBackendBucketRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. + route (str): + Name of the Route resource to delete. """ - backend_bucket: str = proto.Field( - proto.STRING, - number=91714037, - ) - key_name: str = proto.Field( - proto.STRING, - number=500938859, - ) project: str = proto.Field( proto.STRING, number=227560217, @@ -30836,24 +32065,24 @@ class DeleteSignedUrlKeyBackendBucketRequest(proto.Message): number=37109963, optional=True, ) + route: str = proto.Field( + proto.STRING, + number=108704329, + ) -class DeleteSignedUrlKeyBackendServiceRequest(proto.Message): - r"""A request message for BackendServices.DeleteSignedUrlKey. See - the method description for details. +class DeleteRouterRequest(proto.Message): + r"""A request message for Routers.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - backend_service (str): - Name of the BackendService resource to which - the Signed URL Key should be added. The name - should conform to RFC1035. - key_name (str): - The name of the Signed URL Key to delete. project (str): Project ID for this request. + region (str): + Name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -30876,29 +32105,31 @@ class DeleteSignedUrlKeyBackendServiceRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. + router (str): + Name of the Router resource to delete. 
""" - backend_service: str = proto.Field( - proto.STRING, - number=306946058, - ) - key_name: str = proto.Field( - proto.STRING, - number=500938859, - ) project: str = proto.Field( proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) -class DeleteSnapshotRequest(proto.Message): - r"""A request message for Snapshots.Delete. See the method +class DeleteSecurityPolicyRequest(proto.Message): + r"""A request message for SecurityPolicies.Delete. See the method description for details. @@ -30929,8 +32160,8 @@ class DeleteSnapshotRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - snapshot (str): - Name of the Snapshot resource to delete. + security_policy (str): + Name of the security policy to delete. """ project: str = proto.Field( @@ -30942,15 +32173,15 @@ class DeleteSnapshotRequest(proto.Message): number=37109963, optional=True, ) - snapshot: str = proto.Field( + security_policy: str = proto.Field( proto.STRING, - number=284874180, + number=171082513, ) -class DeleteSslCertificateRequest(proto.Message): - r"""A request message for SslCertificates.Delete. See the method - description for details. +class DeleteServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.Delete. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -30958,6 +32189,8 @@ class DeleteSslCertificateRequest(proto.Message): Attributes: project (str): Project ID for this request. + region (str): + Name of the region of this request. request_id (str): An optional request ID to identify requests. 
Specify a unique request ID so that if you must @@ -30980,8 +32213,8 @@ class DeleteSslCertificateRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - ssl_certificate (str): - Name of the SslCertificate resource to + service_attachment (str): + Name of the ServiceAttachment resource to delete. """ @@ -30989,25 +32222,35 @@ class DeleteSslCertificateRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - ssl_certificate: str = proto.Field( + service_attachment: str = proto.Field( proto.STRING, - number=46443492, + number=338957549, ) -class DeleteSslPolicyRequest(proto.Message): - r"""A request message for SslPolicies.Delete. See the method - description for details. +class DeleteSignedUrlKeyBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.DeleteSignedUrlKey. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + backend_bucket (str): + Name of the BackendBucket resource to which + the Signed URL Key should be added. The name + should conform to RFC1035. + key_name (str): + The name of the Signed URL Key to delete. project (str): Project ID for this request. request_id (str): @@ -31032,12 +32275,16 @@ class DeleteSslPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - ssl_policy (str): - Name of the SSL policy to delete. The name - must be 1-63 characters long, and comply with - RFC1035. 
""" + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + key_name: str = proto.Field( + proto.STRING, + number=500938859, + ) project: str = proto.Field( proto.STRING, number=227560217, @@ -31047,20 +32294,22 @@ class DeleteSslPolicyRequest(proto.Message): number=37109963, optional=True, ) - ssl_policy: str = proto.Field( - proto.STRING, - number=295190213, - ) -class DeleteStoragePoolRequest(proto.Message): - r"""A request message for StoragePools.Delete. See the method - description for details. +class DeleteSignedUrlKeyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.DeleteSignedUrlKey. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + backend_service (str): + Name of the BackendService resource to which + the Signed URL Key should be added. The name + should conform to RFC1035. + key_name (str): + The name of the Signed URL Key to delete. project (str): Project ID for this request. request_id (str): @@ -31085,12 +32334,16 @@ class DeleteStoragePoolRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - storage_pool (str): - Name of the storage pool to delete. - zone (str): - The name of the zone for this request. """ + backend_service: str = proto.Field( + proto.STRING, + number=306946058, + ) + key_name: str = proto.Field( + proto.STRING, + number=500938859, + ) project: str = proto.Field( proto.STRING, number=227560217, @@ -31100,18 +32353,10 @@ class DeleteStoragePoolRequest(proto.Message): number=37109963, optional=True, ) - storage_pool: str = proto.Field( - proto.STRING, - number=360473440, - ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class DeleteSubnetworkRequest(proto.Message): - r"""A request message for Subnetworks.Delete. 
See the method +class DeleteSnapshotRequest(proto.Message): + r"""A request message for Snapshots.Delete. See the method description for details. @@ -31120,8 +32365,6 @@ class DeleteSubnetworkRequest(proto.Message): Attributes: project (str): Project ID for this request. - region (str): - Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -31144,32 +32387,28 @@ class DeleteSubnetworkRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - subnetwork (str): - Name of the Subnetwork resource to delete. + snapshot (str): + Name of the Snapshot resource to delete. """ project: str = proto.Field( proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - subnetwork: str = proto.Field( + snapshot: str = proto.Field( proto.STRING, - number=307827694, + number=284874180, ) -class DeleteTargetGrpcProxyRequest(proto.Message): - r"""A request message for TargetGrpcProxies.Delete. See the - method description for details. +class DeleteSslCertificateRequest(proto.Message): + r"""A request message for SslCertificates.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -31199,8 +32438,8 @@ class DeleteTargetGrpcProxyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_grpc_proxy (str): - Name of the TargetGrpcProxy resource to + ssl_certificate (str): + Name of the SslCertificate resource to delete. 
""" @@ -31213,15 +32452,15 @@ class DeleteTargetGrpcProxyRequest(proto.Message): number=37109963, optional=True, ) - target_grpc_proxy: str = proto.Field( + ssl_certificate: str = proto.Field( proto.STRING, - number=5020283, + number=46443492, ) -class DeleteTargetHttpProxyRequest(proto.Message): - r"""A request message for TargetHttpProxies.Delete. See the - method description for details. +class DeleteSslPolicyRequest(proto.Message): + r"""A request message for SslPolicies.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -31251,9 +32490,10 @@ class DeleteTargetHttpProxyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_http_proxy (str): - Name of the TargetHttpProxy resource to - delete. + ssl_policy (str): + Name of the SSL policy to delete. The name + must be 1-63 characters long, and comply with + RFC1035. """ project: str = proto.Field( @@ -31265,15 +32505,15 @@ class DeleteTargetHttpProxyRequest(proto.Message): number=37109963, optional=True, ) - target_http_proxy: str = proto.Field( + ssl_policy: str = proto.Field( proto.STRING, - number=206872421, + number=295190213, ) -class DeleteTargetHttpsProxyRequest(proto.Message): - r"""A request message for TargetHttpsProxies.Delete. See the - method description for details. +class DeleteStoragePoolRequest(proto.Message): + r"""A request message for StoragePools.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -31303,9 +32543,10 @@ class DeleteTargetHttpsProxyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_https_proxy (str): - Name of the TargetHttpsProxy resource to - delete. + storage_pool (str): + Name of the storage pool to delete. 
+ zone (str): + The name of the zone for this request. """ project: str = proto.Field( @@ -31317,14 +32558,18 @@ class DeleteTargetHttpsProxyRequest(proto.Message): number=37109963, optional=True, ) - target_https_proxy: str = proto.Field( + storage_pool: str = proto.Field( proto.STRING, - number=52336748, + number=360473440, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, ) -class DeleteTargetInstanceRequest(proto.Message): - r"""A request message for TargetInstances.Delete. See the method +class DeleteSubnetworkRequest(proto.Message): + r"""A request message for Subnetworks.Delete. See the method description for details. @@ -31333,6 +32578,8 @@ class DeleteTargetInstanceRequest(proto.Message): Attributes: project (str): Project ID for this request. + region (str): + Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -31355,35 +32602,32 @@ class DeleteTargetInstanceRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_instance (str): - Name of the TargetInstance resource to - delete. - zone (str): - Name of the zone scoping this request. + subnetwork (str): + Name of the Subnetwork resource to delete. """ project: str = proto.Field( proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - target_instance: str = proto.Field( - proto.STRING, - number=289769347, - ) - zone: str = proto.Field( + subnetwork: str = proto.Field( proto.STRING, - number=3744684, + number=307827694, ) -class DeleteTargetPoolRequest(proto.Message): - r"""A request message for TargetPools.Delete. See the method - description for details. +class DeleteTargetGrpcProxyRequest(proto.Message): + r"""A request message for TargetGrpcProxies.Delete. 
See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -31391,8 +32635,6 @@ class DeleteTargetPoolRequest(proto.Message): Attributes: project (str): Project ID for this request. - region (str): - Name of the region scoping this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -31415,32 +32657,248 @@ class DeleteTargetPoolRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - target_pool (str): - Name of the TargetPool resource to delete. + target_grpc_proxy (str): + Name of the TargetGrpcProxy resource to + delete. """ project: str = proto.Field( proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) request_id: str = proto.Field( proto.STRING, number=37109963, optional=True, ) - target_pool: str = proto.Field( + target_grpc_proxy: str = proto.Field( proto.STRING, - number=62796298, + number=5020283, ) -class DeleteTargetSslProxyRequest(proto.Message): - r"""A request message for TargetSslProxies.Delete. See the method - description for details. +class DeleteTargetHttpProxyRequest(proto.Message): + r"""A request message for TargetHttpProxies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. 
If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_http_proxy (str): + Name of the TargetHttpProxy resource to + delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_http_proxy: str = proto.Field( + proto.STRING, + number=206872421, + ) + + +class DeleteTargetHttpsProxyRequest(proto.Message): + r"""A request message for TargetHttpsProxies.Delete. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_https_proxy (str): + Name of the TargetHttpsProxy resource to + delete. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_https_proxy: str = proto.Field( + proto.STRING, + number=52336748, + ) + + +class DeleteTargetInstanceRequest(proto.Message): + r"""A request message for TargetInstances.Delete. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_instance (str): + Name of the TargetInstance resource to + delete. + zone (str): + Name of the zone scoping this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_instance: str = proto.Field( + proto.STRING, + number=289769347, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class DeleteTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.Delete. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + target_pool (str): + Name of the TargetPool resource to delete. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + target_pool: str = proto.Field( + proto.STRING, + number=62796298, + ) + + +class DeleteTargetSslProxyRequest(proto.Message): + r"""A request message for TargetSslProxies.Delete. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -31853,6 +33311,64 @@ class DeleteZoneOperationResponse(proto.Message): """ +class DeleteZoneVmExtensionPolicyRequest(proto.Message): + r"""A request message for ZoneVmExtensionPolicies.Delete. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + vm_extension_policy (str): + Name of the zone VM extension policy to + delete. + zone (str): + Name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + vm_extension_policy: str = proto.Field( + proto.STRING, + number=331532730, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class Denied(proto.Message): r""" @@ -34083,6 +35599,35 @@ class DiskTypesScopedList(proto.Message): ) +class DiskUpdateKmsKeyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + Optional. The new KMS key to replace the current one on the + disk. If empty, the disk will be re-encrypted using the + primary version of the disk's current KMS key. 
+ + The KMS key can be provided in the following formats: + + :: + + - projects/project_id/locations/location/keyRings/key_ring/cryptoKeys/key + + Where project is the project ID or project number. + + This field is a member of `oneof`_ ``_kms_key_name``. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + + class DisksAddResourcePoliciesRequest(proto.Message): r""" @@ -34501,7 +36046,7 @@ class EnableXpnResourceProjectRequest(proto.Message): class Error(proto.Message): - r"""Output only. [Output Only] Encountered errors. + r"""Output only. Encountered errors. Attributes: errors (MutableSequence[google.cloud.compute_v1.types.Errors]): @@ -38217,6 +39762,11 @@ class FutureReservation(proto.Message): new commitment or update an existing commitment. This field is a member of `oneof`_ ``_commitment_info``. + confidential_compute_type (str): + Check the ConfidentialComputeType enum for + the list of possible values. + + This field is a member of `oneof`_ ``_confidential_compute_type``. creation_timestamp (str): Output only. [Output Only] The creation timestamp for this future reservation inRFC3339 text format. @@ -38268,6 +39818,12 @@ class FutureReservation(proto.Message): reservations name format will be -date-####. This field is a member of `oneof`_ ``_name_prefix``. + params (google.cloud.compute_v1.types.FutureReservationParams): + Input only. Additional params passed with the + request, but not persisted as part of resource + payload. + + This field is a member of `oneof`_ ``_params``. planning_status (str): Planning state before being submitted for evaluation Check the PlanningStatus enum for the @@ -38338,6 +39894,23 @@ class FutureReservation(proto.Message): This field is a member of `oneof`_ ``_zone``. """ + class ConfidentialComputeType(proto.Enum): + r""" + + Values: + UNDEFINED_CONFIDENTIAL_COMPUTE_TYPE (0): + A value indicating that the enum field is not + set. 
+ CONFIDENTIAL_COMPUTE_TYPE_TDX (301241954): + Intel Trust Domain Extensions. + CONFIDENTIAL_COMPUTE_TYPE_UNSPECIFIED (42227601): + No description available. + """ + + UNDEFINED_CONFIDENTIAL_COMPUTE_TYPE = 0 + CONFIDENTIAL_COMPUTE_TYPE_TDX = 301241954 + CONFIDENTIAL_COMPUTE_TYPE_UNSPECIFIED = 42227601 + class DeploymentType(proto.Enum): r"""Type of the deployment requested as part of future reservation. @@ -38455,6 +40028,11 @@ class SchedulingType(proto.Enum): optional=True, message="FutureReservationCommitmentInfo", ) + confidential_compute_type: str = proto.Field( + proto.STRING, + number=386447257, + optional=True, + ) creation_timestamp: str = proto.Field( proto.STRING, number=30525366, @@ -38495,6 +40073,12 @@ class SchedulingType(proto.Enum): number=236409542, optional=True, ) + params: "FutureReservationParams" = proto.Field( + proto.MESSAGE, + number=78313862, + optional=True, + message="FutureReservationParams", + ) planning_status: str = proto.Field( proto.STRING, number=19714836, @@ -38653,6 +40237,28 @@ class PreviousCommitmentTerms(proto.Enum): ) +class FutureReservationParams(proto.Message): + r"""Additional future reservation params. + + Attributes: + resource_manager_tags (MutableMapping[str, str]): + Input only. Resource manager tags to be bound to the future + reservation. Tag keys and values have the same definition as + resource manager tags. Keys and values can be either in + numeric format, such as ``tagKeys/{tag_key_id}`` and + ``tagValues/{tag_value_id}`` or in namespaced format such as + ``{org_id|project_id}/{tag_key_short_name}`` and + ``{tag_value_short_name}``. The field is ignored (both PUT & + PATCH) when empty. 
+ """ + + resource_manager_tags: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=377671164, + ) + + class FutureReservationSpecificSKUProperties(proto.Message): r""" @@ -41042,26 +42648,23 @@ class GetHealthRegionBackendServiceRequest(proto.Message): ) -class GetHealthTargetPoolRequest(proto.Message): - r"""A request message for TargetPools.GetHealth. See the method - description for details. +class GetHealthRegionCompositeHealthCheckRequest(proto.Message): + r"""A request message for RegionCompositeHealthChecks.GetHealth. + See the method description for details. Attributes: - instance_reference_resource (google.cloud.compute_v1.types.InstanceReference): - The body resource for this request + composite_health_check (str): + Name of the CompositeHealthCheck resource to + get health for. project (str): - Project ID for this request. + Name of the project scoping this request. region (str): Name of the region scoping this request. - target_pool (str): - Name of the TargetPool resource to which the - queried instance belongs. """ - instance_reference_resource: "InstanceReference" = proto.Field( - proto.MESSAGE, - number=292926060, - message="InstanceReference", + composite_health_check: str = proto.Field( + proto.STRING, + number=466984989, ) project: str = proto.Field( proto.STRING, @@ -41071,182 +42674,76 @@ class GetHealthTargetPoolRequest(proto.Message): proto.STRING, number=138946292, ) - target_pool: str = proto.Field( - proto.STRING, - number=62796298, - ) -class GetIamPolicyBackendBucketRequest(proto.Message): - r"""A request message for BackendBuckets.GetIamPolicy. See the +class GetHealthRegionHealthSourceRequest(proto.Message): + r"""A request message for RegionHealthSources.GetHealth. See the method description for details. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - Attributes: - options_requested_policy_version (int): - Requested IAM Policy version. 
- - This field is a member of `oneof`_ ``_options_requested_policy_version``. + health_source (str): + Name of the HealthSource resource to get + health for. project (str): - Project ID for this request. - resource (str): - Name or id of the resource for this request. + Name of the project scoping this request. + region (str): + Name of the region scoping this request. """ - options_requested_policy_version: int = proto.Field( - proto.INT32, - number=499220029, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - resource: str = proto.Field( + health_source: str = proto.Field( proto.STRING, - number=195806222, - ) - - -class GetIamPolicyBackendServiceRequest(proto.Message): - r"""A request message for BackendServices.GetIamPolicy. See the - method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - options_requested_policy_version (int): - Requested IAM Policy version. - - This field is a member of `oneof`_ ``_options_requested_policy_version``. - project (str): - Project ID for this request. - resource (str): - Name or id of the resource for this request. - """ - - options_requested_policy_version: int = proto.Field( - proto.INT32, - number=499220029, - optional=True, + number=376521566, ) project: str = proto.Field( proto.STRING, number=227560217, ) - resource: str = proto.Field( + region: str = proto.Field( proto.STRING, - number=195806222, + number=138946292, ) -class GetIamPolicyDiskRequest(proto.Message): - r"""A request message for Disks.GetIamPolicy. See the method +class GetHealthTargetPoolRequest(proto.Message): + r"""A request message for TargetPools.GetHealth. See the method description for details. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - Attributes: - options_requested_policy_version (int): - Requested IAM Policy version. 
- - This field is a member of `oneof`_ ``_options_requested_policy_version``. + instance_reference_resource (google.cloud.compute_v1.types.InstanceReference): + The body resource for this request project (str): Project ID for this request. - resource (str): - Name or id of the resource for this request. - zone (str): - The name of the zone for this request. + region (str): + Name of the region scoping this request. + target_pool (str): + Name of the TargetPool resource to which the + queried instance belongs. """ - options_requested_policy_version: int = proto.Field( - proto.INT32, - number=499220029, - optional=True, + instance_reference_resource: "InstanceReference" = proto.Field( + proto.MESSAGE, + number=292926060, + message="InstanceReference", ) project: str = proto.Field( proto.STRING, number=227560217, ) - resource: str = proto.Field( + region: str = proto.Field( proto.STRING, - number=195806222, + number=138946292, ) - zone: str = proto.Field( + target_pool: str = proto.Field( proto.STRING, - number=3744684, + number=62796298, ) -class GetIamPolicyFirewallPolicyRequest(proto.Message): - r"""A request message for FirewallPolicies.GetIamPolicy. See the +class GetIamPolicyBackendBucketRequest(proto.Message): + r"""A request message for BackendBuckets.GetIamPolicy. See the method description for details. - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - options_requested_policy_version (int): - Requested IAM Policy version. - - This field is a member of `oneof`_ ``_options_requested_policy_version``. - resource (str): - Name or id of the resource for this request. - """ - - options_requested_policy_version: int = proto.Field( - proto.INT32, - number=499220029, - optional=True, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - - -class GetIamPolicyImageRequest(proto.Message): - r"""A request message for Images.GetIamPolicy. 
See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - options_requested_policy_version (int): - Requested IAM Policy version. - - This field is a member of `oneof`_ ``_options_requested_policy_version``. - project (str): - Project ID for this request. - resource (str): - Name or id of the resource for this request. - """ - - options_requested_policy_version: int = proto.Field( - proto.INT32, - number=499220029, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - resource: str = proto.Field( - proto.STRING, - number=195806222, - ) - - -class GetIamPolicyInstanceRequest(proto.Message): - r"""A request message for Instances.GetIamPolicy. See the method - description for details. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: @@ -41258,8 +42755,6 @@ class GetIamPolicyInstanceRequest(proto.Message): Project ID for this request. resource (str): Name or id of the resource for this request. - zone (str): - The name of the zone for this request. """ options_requested_policy_version: int = proto.Field( @@ -41275,14 +42770,10 @@ class GetIamPolicyInstanceRequest(proto.Message): proto.STRING, number=195806222, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class GetIamPolicyInstanceTemplateRequest(proto.Message): - r"""A request message for InstanceTemplates.GetIamPolicy. See the +class GetIamPolicyBackendServiceRequest(proto.Message): + r"""A request message for BackendServices.GetIamPolicy. See the method description for details. @@ -41314,9 +42805,9 @@ class GetIamPolicyInstanceTemplateRequest(proto.Message): ) -class GetIamPolicyInstantSnapshotRequest(proto.Message): - r"""A request message for InstantSnapshots.GetIamPolicy. See the - method description for details. 
+class GetIamPolicyDiskRequest(proto.Message): + r"""A request message for Disks.GetIamPolicy. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41353,9 +42844,35 @@ class GetIamPolicyInstantSnapshotRequest(proto.Message): ) -class GetIamPolicyInterconnectAttachmentGroupRequest(proto.Message): - r"""A request message for - InterconnectAttachmentGroups.GetIamPolicy. See the method +class GetIamPolicyFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyImageRequest(proto.Message): + r"""A request message for Images.GetIamPolicy. See the method description for details. @@ -41387,9 +42904,9 @@ class GetIamPolicyInterconnectAttachmentGroupRequest(proto.Message): ) -class GetIamPolicyInterconnectGroupRequest(proto.Message): - r"""A request message for InterconnectGroups.GetIamPolicy. See - the method description for details. +class GetIamPolicyInstanceRequest(proto.Message): + r"""A request message for Instances.GetIamPolicy. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41403,6 +42920,8 @@ class GetIamPolicyInterconnectGroupRequest(proto.Message): Project ID for this request. 
resource (str): Name or id of the resource for this request. + zone (str): + The name of the zone for this request. """ options_requested_policy_version: int = proto.Field( @@ -41418,11 +42937,15 @@ class GetIamPolicyInterconnectGroupRequest(proto.Message): proto.STRING, number=195806222, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class GetIamPolicyLicenseRequest(proto.Message): - r"""A request message for Licenses.GetIamPolicy. See the method - description for details. +class GetIamPolicyInstanceTemplateRequest(proto.Message): + r"""A request message for InstanceTemplates.GetIamPolicy. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41453,9 +42976,9 @@ class GetIamPolicyLicenseRequest(proto.Message): ) -class GetIamPolicyMachineImageRequest(proto.Message): - r"""A request message for MachineImages.GetIamPolicy. See the - method description for details. +class GetIamPolicyInstantSnapshotGroupRequest(proto.Message): + r"""A request message for InstantSnapshotGroups.GetIamPolicy. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41469,6 +42992,8 @@ class GetIamPolicyMachineImageRequest(proto.Message): Project ID for this request. resource (str): Name or id of the resource for this request. + zone (str): + The name of the zone for this request. """ options_requested_policy_version: int = proto.Field( @@ -41484,11 +43009,15 @@ class GetIamPolicyMachineImageRequest(proto.Message): proto.STRING, number=195806222, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class GetIamPolicyNetworkAttachmentRequest(proto.Message): - r"""A request message for NetworkAttachments.GetIamPolicy. See - the method description for details. 
+class GetIamPolicyInstantSnapshotRequest(proto.Message): + r"""A request message for InstantSnapshots.GetIamPolicy. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41500,10 +43029,10 @@ class GetIamPolicyNetworkAttachmentRequest(proto.Message): This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. - region (str): - The name of the region for this request. resource (str): Name or id of the resource for this request. + zone (str): + The name of the zone for this request. """ options_requested_policy_version: int = proto.Field( @@ -41515,9 +43044,43 @@ class GetIamPolicyNetworkAttachmentRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( + resource: str = proto.Field( proto.STRING, - number=138946292, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyInterconnectAttachmentGroupRequest(proto.Message): + r"""A request message for + InterconnectAttachmentGroups.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. 
+ """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, ) resource: str = proto.Field( proto.STRING, @@ -41525,9 +43088,9 @@ class GetIamPolicyNetworkAttachmentRequest(proto.Message): ) -class GetIamPolicyNetworkFirewallPolicyRequest(proto.Message): - r"""A request message for NetworkFirewallPolicies.GetIamPolicy. - See the method description for details. +class GetIamPolicyInterconnectGroupRequest(proto.Message): + r"""A request message for InterconnectGroups.GetIamPolicy. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41558,8 +43121,8 @@ class GetIamPolicyNetworkFirewallPolicyRequest(proto.Message): ) -class GetIamPolicyNodeGroupRequest(proto.Message): - r"""A request message for NodeGroups.GetIamPolicy. See the method +class GetIamPolicyLicenseRequest(proto.Message): + r"""A request message for Licenses.GetIamPolicy. See the method description for details. @@ -41574,8 +43137,6 @@ class GetIamPolicyNodeGroupRequest(proto.Message): Project ID for this request. resource (str): Name or id of the resource for this request. - zone (str): - The name of the zone for this request. """ options_requested_policy_version: int = proto.Field( @@ -41591,14 +43152,10 @@ class GetIamPolicyNodeGroupRequest(proto.Message): proto.STRING, number=195806222, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class GetIamPolicyNodeTemplateRequest(proto.Message): - r"""A request message for NodeTemplates.GetIamPolicy. See the +class GetIamPolicyMachineImageRequest(proto.Message): + r"""A request message for MachineImages.GetIamPolicy. See the method description for details. @@ -41611,8 +43168,6 @@ class GetIamPolicyNodeTemplateRequest(proto.Message): This field is a member of `oneof`_ ``_options_requested_policy_version``. 
project (str): Project ID for this request. - region (str): - The name of the region for this request. resource (str): Name or id of the resource for this request. """ @@ -41626,18 +43181,14 @@ class GetIamPolicyNodeTemplateRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) resource: str = proto.Field( proto.STRING, number=195806222, ) -class GetIamPolicyRegionBackendServiceRequest(proto.Message): - r"""A request message for RegionBackendServices.GetIamPolicy. See +class GetIamPolicyNetworkAttachmentRequest(proto.Message): + r"""A request message for NetworkAttachments.GetIamPolicy. See the method description for details. @@ -41675,9 +43226,9 @@ class GetIamPolicyRegionBackendServiceRequest(proto.Message): ) -class GetIamPolicyRegionDiskRequest(proto.Message): - r"""A request message for RegionDisks.GetIamPolicy. See the - method description for details. +class GetIamPolicyNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.GetIamPolicy. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41689,8 +43240,6 @@ class GetIamPolicyRegionDiskRequest(proto.Message): This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. - region (str): - The name of the region for this request. resource (str): Name or id of the resource for this request. """ @@ -41704,19 +43253,15 @@ class GetIamPolicyRegionDiskRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) resource: str = proto.Field( proto.STRING, number=195806222, ) -class GetIamPolicyRegionInstantSnapshotRequest(proto.Message): - r"""A request message for RegionInstantSnapshots.GetIamPolicy. - See the method description for details. 
+class GetIamPolicyNodeGroupRequest(proto.Message): + r"""A request message for NodeGroups.GetIamPolicy. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41728,10 +43273,10 @@ class GetIamPolicyRegionInstantSnapshotRequest(proto.Message): This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. - region (str): - The name of the region for this request. resource (str): Name or id of the resource for this request. + zone (str): + The name of the zone for this request. """ options_requested_policy_version: int = proto.Field( @@ -41743,20 +43288,19 @@ class GetIamPolicyRegionInstantSnapshotRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) resource: str = proto.Field( proto.STRING, number=195806222, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class GetIamPolicyRegionNetworkFirewallPolicyRequest(proto.Message): - r"""A request message for - RegionNetworkFirewallPolicies.GetIamPolicy. See the method - description for details. +class GetIamPolicyNodeTemplateRequest(proto.Message): + r"""A request message for NodeTemplates.GetIamPolicy. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41793,9 +43337,9 @@ class GetIamPolicyRegionNetworkFirewallPolicyRequest(proto.Message): ) -class GetIamPolicyReservationBlockRequest(proto.Message): - r"""A request message for ReservationBlocks.GetIamPolicy. See the - method description for details. +class GetIamPolicyRegionBackendBucketRequest(proto.Message): + r"""A request message for RegionBackendBuckets.GetIamPolicy. See + the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41805,15 +43349,12 @@ class GetIamPolicyReservationBlockRequest(proto.Message): Requested IAM Policy version. This field is a member of `oneof`_ ``_options_requested_policy_version``. - parent_resource (str): - Name or id of parent resource of the resource - for this request. project (str): Project ID for this request. + region (str): + The name of the region for this request. resource (str): Name or id of the resource for this request. - zone (str): - The name of the zone for this request. """ options_requested_policy_version: int = proto.Field( @@ -41821,27 +43362,23 @@ class GetIamPolicyReservationBlockRequest(proto.Message): number=499220029, optional=True, ) - parent_resource: str = proto.Field( - proto.STRING, - number=18091011, - ) project: str = proto.Field( proto.STRING, number=227560217, ) - resource: str = proto.Field( + region: str = proto.Field( proto.STRING, - number=195806222, + number=138946292, ) - zone: str = proto.Field( + resource: str = proto.Field( proto.STRING, - number=3744684, + number=195806222, ) -class GetIamPolicyReservationRequest(proto.Message): - r"""A request message for Reservations.GetIamPolicy. See the - method description for details. +class GetIamPolicyRegionBackendServiceRequest(proto.Message): + r"""A request message for RegionBackendServices.GetIamPolicy. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41853,10 +43390,10 @@ class GetIamPolicyReservationRequest(proto.Message): This field is a member of `oneof`_ ``_options_requested_policy_version``. project (str): Project ID for this request. + region (str): + The name of the region for this request. resource (str): Name or id of the resource for this request. - zone (str): - The name of the zone for this request. 
""" options_requested_policy_version: int = proto.Field( @@ -41868,19 +43405,19 @@ class GetIamPolicyReservationRequest(proto.Message): proto.STRING, number=227560217, ) - resource: str = proto.Field( + region: str = proto.Field( proto.STRING, - number=195806222, + number=138946292, ) - zone: str = proto.Field( + resource: str = proto.Field( proto.STRING, - number=3744684, + number=195806222, ) -class GetIamPolicyReservationSubBlockRequest(proto.Message): - r"""A request message for ReservationSubBlocks.GetIamPolicy. See - the method description for details. +class GetIamPolicyRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.GetIamPolicy. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41890,15 +43427,12 @@ class GetIamPolicyReservationSubBlockRequest(proto.Message): Requested IAM Policy version. This field is a member of `oneof`_ ``_options_requested_policy_version``. - parent_resource (str): - Name or id of parent resource of the resource - for this request. project (str): Project ID for this request. + region (str): + The name of the region for this request. resource (str): Name or id of the resource for this request. - zone (str): - The name of the zone for this request. """ options_requested_policy_version: int = proto.Field( @@ -41906,27 +43440,24 @@ class GetIamPolicyReservationSubBlockRequest(proto.Message): number=499220029, optional=True, ) - parent_resource: str = proto.Field( - proto.STRING, - number=18091011, - ) project: str = proto.Field( proto.STRING, number=227560217, ) - resource: str = proto.Field( + region: str = proto.Field( proto.STRING, - number=195806222, + number=138946292, ) - zone: str = proto.Field( + resource: str = proto.Field( proto.STRING, - number=3744684, + number=195806222, ) -class GetIamPolicyResourcePolicyRequest(proto.Message): - r"""A request message for ResourcePolicies.GetIamPolicy. 
See the - method description for details. +class GetIamPolicyRegionInstantSnapshotGroupRequest(proto.Message): + r"""A request message for + RegionInstantSnapshotGroups.GetIamPolicy. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -41963,9 +43494,297 @@ class GetIamPolicyResourcePolicyRequest(proto.Message): ) -class GetIamPolicyServiceAttachmentRequest(proto.Message): - r"""A request message for ServiceAttachments.GetIamPolicy. See - the method description for details. +class GetIamPolicyRegionInstantSnapshotRequest(proto.Message): + r"""A request message for RegionInstantSnapshots.GetIamPolicy. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for + RegionNetworkFirewallPolicies.GetIamPolicy. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. 
+ + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyReservationBlockRequest(proto.Message): + r"""A request message for ReservationBlocks.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. 
+ parent_resource (str): + Name or id of parent resource of the resource + for this request. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + parent_resource: str = proto.Field( + proto.STRING, + number=18091011, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyReservationRequest(proto.Message): + r"""A request message for Reservations.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyReservationSubBlockRequest(proto.Message): + r"""A request message for ReservationSubBlocks.GetIamPolicy. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. 
+ + This field is a member of `oneof`_ ``_options_requested_policy_version``. + parent_resource (str): + Name or id of parent resource of the resource + for this request. + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + parent_resource: str = proto.Field( + proto.STRING, + number=18091011, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class GetIamPolicyResourcePolicyRequest(proto.Message): + r"""A request message for ResourcePolicies.GetIamPolicy. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + options_requested_policy_version (int): + Requested IAM Policy version. + + This field is a member of `oneof`_ ``_options_requested_policy_version``. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + """ + + options_requested_policy_version: int = proto.Field( + proto.INT32, + number=499220029, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + +class GetIamPolicyServiceAttachmentRequest(proto.Message): + r"""A request message for ServiceAttachments.GetIamPolicy. See + the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -42326,6 +44145,34 @@ class GetInstanceTemplateRequest(proto.Message): ) +class GetInstantSnapshotGroupRequest(proto.Message): + r"""A request message for InstantSnapshotGroups.Get. See the + method description for details. + + Attributes: + instant_snapshot_group (str): + Name of the InstantSnapshotGroup resource to + return. + project (str): + Project ID for this request. + zone (str): + The name of the zone for this request. + """ + + instant_snapshot_group: str = proto.Field( + proto.STRING, + number=223180386, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class GetInstantSnapshotRequest(proto.Message): r"""A request message for InstantSnapshots.Get. See the method description for details. @@ -43296,6 +45143,33 @@ class GetRegionAutoscalerRequest(proto.Message): ) +class GetRegionBackendBucketRequest(proto.Message): + r"""A request message for RegionBackendBuckets.Get. See the + method description for details. + + Attributes: + backend_bucket (str): + Name of the BackendBucket resource to return. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + class GetRegionBackendServiceRequest(proto.Message): r"""A request message for RegionBackendServices.Get. See the method description for details. @@ -43572,6 +45446,46 @@ class GetRegionInstanceGroupManagerRequest(proto.Message): ) +class GetRegionInstanceGroupManagerResizeRequestRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagerResizeRequests.Get. See the method + description for details. 
+ + Attributes: + instance_group_manager (str): + The name of the managed instance group. + Name should conform to RFC1035 or be a resource + ID. + project (str): + Project ID for this request. + region (str): + The name of the region + scoping this request. Name should conform to + RFC1035. + resize_request (str): + The name of the resize request. + Name should conform to RFC1035 or be a resource + ID. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resize_request: str = proto.Field( + proto.STRING, + number=216941060, + ) + + class GetRegionInstanceGroupRequest(proto.Message): r"""A request message for RegionInstanceGroups.Get. See the method description for details. @@ -43627,6 +45541,34 @@ class GetRegionInstanceTemplateRequest(proto.Message): ) +class GetRegionInstantSnapshotGroupRequest(proto.Message): + r"""A request message for RegionInstantSnapshotGroups.Get. See + the method description for details. + + Attributes: + instant_snapshot_group (str): + Name of the InstantSnapshotGroup resource to + return. + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + """ + + instant_snapshot_group: str = proto.Field( + proto.STRING, + number=223180386, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + class GetRegionInstantSnapshotRequest(proto.Message): r"""A request message for RegionInstantSnapshots.Get. See the method description for details. @@ -43816,6 +45758,54 @@ class GetRegionSecurityPolicyRequest(proto.Message): ) +class GetRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.Get. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. 
+ region (str): + Name of the region for this request. + snapshot (str): + Name of the Snapshot resource to return. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + snapshot: str = proto.Field( + proto.STRING, + number=284874180, + ) + + +class GetRegionSnapshotSettingRequest(proto.Message): + r"""A request message for RegionSnapshotSettings.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + + class GetRegionSslCertificateRequest(proto.Message): r"""A request message for RegionSslCertificates.Get. See the method description for details. @@ -45677,6 +47667,34 @@ class GetZoneRequest(proto.Message): ) +class GetZoneVmExtensionPolicyRequest(proto.Message): + r"""A request message for ZoneVmExtensionPolicies.Get. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + vm_extension_policy (str): + Name of the VM extension policy resource to + return. + zone (str): + Name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + vm_extension_policy: str = proto.Field( + proto.STRING, + number=331532730, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class GlobalAddressesMoveRequest(proto.Message): r""" @@ -46148,6 +48166,7 @@ class GuestOsFeature(proto.Message): - TDX_CAPABLE - IDPF - SNP_SVSM_CAPABLE + - CCA_CAPABLE For more information, see Enabling guest operating system features. 
Check the Type enum for the list of possible @@ -46174,6 +48193,7 @@ class Type(proto.Enum): - TDX_CAPABLE - IDPF - SNP_SVSM_CAPABLE + - CCA_CAPABLE For more information, see Enabling guest operating system features. @@ -48402,6 +50422,67 @@ def raw_page(self): ) +class HealthSourceHealth(proto.Message): + r"""Response message for RegionHealthSources.GetHealth + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_state (str): + Health state of the HealthSource. + Check the HealthState enum for the list of + possible values. + + This field is a member of `oneof`_ ``_health_state``. + kind (str): + Output only. [Output Only] Type of resource. + Alwayscompute#healthSourceHealth for the health of health + sources. + + This field is a member of `oneof`_ ``_kind``. + sources (MutableSequence[google.cloud.compute_v1.types.HealthSourcesGetHealthResponseSourceInfo]): + Health state details of the sources. + """ + + class HealthState(proto.Enum): + r"""Health state of the HealthSource. + + Values: + UNDEFINED_HEALTH_STATE (0): + A value indicating that the enum field is not + set. + HEALTHY (439801213): + No description available. + UNHEALTHY (462118084): + No description available. + UNKNOWN (433141802): + No description available. + """ + + UNDEFINED_HEALTH_STATE = 0 + HEALTHY = 439801213 + UNHEALTHY = 462118084 + UNKNOWN = 433141802 + + health_state: str = proto.Field( + proto.STRING, + number=324007150, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + sources: MutableSequence["HealthSourcesGetHealthResponseSourceInfo"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=125606840, + message="HealthSourcesGetHealthResponseSourceInfo", + ) + ) + + class HealthSourceList(proto.Message): r""" @@ -48476,6 +50557,89 @@ def raw_page(self): ) +class HealthSourcesGetHealthResponseSourceInfo(proto.Message): + r""" + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backends (MutableSequence[google.cloud.compute_v1.types.HealthSourcesGetHealthResponseSourceInfoBackendInfo]): + Represents an instance group or network endpoint group + behind the source backend service. Only used if the + sourceType of the regionHealthSource is BACKEND_SERVICE. + forwarding_rule (str): + Fully qualified URL of the forwarding rule + associated with the source resource if it is a + L4ILB backend service. + + This field is a member of `oneof`_ ``_forwarding_rule``. + source (str): + Fully qualified URL of the associated source + resource. This is always a backend service URL. + + This field is a member of `oneof`_ ``_source``. + """ + + backends: MutableSequence["HealthSourcesGetHealthResponseSourceInfoBackendInfo"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=510839903, + message="HealthSourcesGetHealthResponseSourceInfoBackendInfo", + ) + ) + forwarding_rule: str = proto.Field( + proto.STRING, + number=269964030, + optional=True, + ) + source: str = proto.Field( + proto.STRING, + number=177235995, + optional=True, + ) + + +class HealthSourcesGetHealthResponseSourceInfoBackendInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + endpoint_count (int): + Total number of endpoints when determining + the health of the regionHealthSource. + + This field is a member of `oneof`_ ``_endpoint_count``. + group (str): + Fully qualified URL of an instance group or + network endpoint group behind the source backend + service. + + This field is a member of `oneof`_ ``_group``. + healthy_endpoint_count (int): + Number of endpoints considered healthy when + determining health of the regionHealthSource. + + This field is a member of `oneof`_ ``_healthy_endpoint_count``. 
+ """ + + endpoint_count: int = proto.Field( + proto.INT32, + number=486641125, + optional=True, + ) + group: str = proto.Field( + proto.STRING, + number=98629247, + optional=True, + ) + healthy_endpoint_count: int = proto.Field( + proto.INT32, + number=291284167, + optional=True, + ) + + class HealthSourcesScopedList(proto.Message): r""" @@ -49618,6 +51782,14 @@ class HttpRouteAction(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + cache_policy (google.cloud.compute_v1.types.CachePolicy): + Specifies the cache policy configuration for matched + traffic. Available only for Global ``EXTERNAL_MANAGED`` load + balancer schemes. At least one property must be specified. + This policy cannot be specified if any target backend has + Identity-Aware Proxy enabled. + + This field is a member of `oneof`_ ``_cache_policy``. cors_policy (google.cloud.compute_v1.types.CorsPolicy): The specification for allowing client-side cross-origin requests. For more information @@ -49729,6 +51901,12 @@ class HttpRouteAction(proto.Message): specified in this HttpRouteAction. """ + cache_policy: "CachePolicy" = proto.Field( + proto.MESSAGE, + number=457366671, + optional=True, + message="CachePolicy", + ) cors_policy: "CorsPolicy" = proto.Field( proto.MESSAGE, number=398943748, @@ -52116,6 +54294,73 @@ class InsertInstanceTemplateRequest(proto.Message): ) +class InsertInstantSnapshotGroupRequest(proto.Message): + r"""A request message for InstantSnapshotGroups.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instant_snapshot_group_resource (google.cloud.compute_v1.types.InstantSnapshotGroup): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + source_consistency_group (str): + begin_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_source_consistency_group``. + zone (str): + Name of the zone for this request. + """ + + instant_snapshot_group_resource: "InstantSnapshotGroup" = proto.Field( + proto.MESSAGE, + number=145067339, + message="InstantSnapshotGroup", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_consistency_group: str = proto.Field( + proto.STRING, + number=531359348, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class InsertInstantSnapshotRequest(proto.Message): r"""A request message for InstantSnapshots.Insert. See the method description for details. @@ -53197,6 +55442,64 @@ class InsertRegionAutoscalerRequest(proto.Message): ) +class InsertRegionBackendBucketRequest(proto.Message): + r"""A request message for RegionBackendBuckets.Insert. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region of this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_bucket_resource: "BackendBucket" = proto.Field( + proto.MESSAGE, + number=380757784, + message="BackendBucket", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + class InsertRegionBackendServiceRequest(proto.Message): r"""A request message for RegionBackendServices.Insert. See the method description for details. @@ -53729,6 +56032,75 @@ class InsertRegionInstanceGroupManagerRequest(proto.Message): ) +class InsertRegionInstanceGroupManagerResizeRequestRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagerResizeRequests.Insert. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instance_group_manager (str): + Name of the managed instance group to which + the resize request is scoped. Name should + conform to RFC1035 or be a resource ID. + instance_group_manager_resize_request_resource (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region + scoping this request. Name should conform to + RFC1035. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) + instance_group_manager_resize_request_resource: "InstanceGroupManagerResizeRequest" = proto.Field( + proto.MESSAGE, + number=468541293, + message="InstanceGroupManagerResizeRequest", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + class InsertRegionInstanceTemplateRequest(proto.Message): r"""A request message for RegionInstanceTemplates.Insert. 
See the method description for details. @@ -53787,6 +56159,73 @@ class InsertRegionInstanceTemplateRequest(proto.Message): ) +class InsertRegionInstantSnapshotGroupRequest(proto.Message): + r"""A request message for RegionInstantSnapshotGroups.Insert. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + instant_snapshot_group_resource (google.cloud.compute_v1.types.InstantSnapshotGroup): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + source_consistency_group (str): + begin_interface: MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_source_consistency_group``. 
+ """ + + instant_snapshot_group_resource: "InstantSnapshotGroup" = proto.Field( + proto.MESSAGE, + number=145067339, + message="InstantSnapshotGroup", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + source_consistency_group: str = proto.Field( + proto.STRING, + number=531359348, + optional=True, + ) + + class InsertRegionInstantSnapshotRequest(proto.Message): r"""A request message for RegionInstantSnapshots.Insert. See the method description for details. @@ -54088,9 +56527,9 @@ class InsertRegionSecurityPolicyRequest(proto.Message): ) -class InsertRegionSslCertificateRequest(proto.Message): - r"""A request message for RegionSslCertificates.Insert. See the - method description for details. +class InsertRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.Insert. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -54099,7 +56538,7 @@ class InsertRegionSslCertificateRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + Name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -54122,7 +56561,7 @@ class InsertRegionSslCertificateRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. 
- ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): + snapshot_resource (google.cloud.compute_v1.types.Snapshot): The body resource for this request """ @@ -54139,15 +56578,15 @@ class InsertRegionSslCertificateRequest(proto.Message): number=37109963, optional=True, ) - ssl_certificate_resource: "SslCertificate" = proto.Field( + snapshot_resource: "Snapshot" = proto.Field( proto.MESSAGE, - number=180709897, - message="SslCertificate", + number=481319977, + message="Snapshot", ) -class InsertRegionSslPolicyRequest(proto.Message): - r"""A request message for RegionSslPolicies.Insert. See the +class InsertRegionSslCertificateRequest(proto.Message): + r"""A request message for RegionSslCertificates.Insert. See the method description for details. @@ -54180,7 +56619,7 @@ class InsertRegionSslPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + ssl_certificate_resource (google.cloud.compute_v1.types.SslCertificate): The body resource for this request """ @@ -54197,15 +56636,73 @@ class InsertRegionSslPolicyRequest(proto.Message): number=37109963, optional=True, ) - ssl_policy_resource: "SslPolicy" = proto.Field( + ssl_certificate_resource: "SslCertificate" = proto.Field( proto.MESSAGE, - number=274891848, - message="SslPolicy", + number=180709897, + message="SslCertificate", ) -class InsertRegionTargetHttpProxyRequest(proto.Message): - r"""A request message for RegionTargetHttpProxies.Insert. See the +class InsertRegionSslPolicyRequest(proto.Message): + r"""A request message for RegionSslPolicies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + ssl_policy_resource (google.cloud.compute_v1.types.SslPolicy): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + ssl_policy_resource: "SslPolicy" = proto.Field( + proto.MESSAGE, + number=274891848, + message="SslPolicy", + ) + + +class InsertRegionTargetHttpProxyRequest(proto.Message): + r"""A request message for RegionTargetHttpProxies.Insert. See the method description for details. @@ -55703,6 +58200,64 @@ class InsertWireGroupRequest(proto.Message): ) +class InsertZoneVmExtensionPolicyRequest(proto.Message): + r"""A request message for ZoneVmExtensionPolicies.Insert. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. 
+ Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + vm_extension_policy_resource (google.cloud.compute_v1.types.VmExtensionPolicy): + The body resource for this request + zone (str): + Name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + vm_extension_policy_resource: "VmExtensionPolicy" = proto.Field( + proto.MESSAGE, + number=515669235, + message="VmExtensionPolicy", + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class Instance(proto.Message): r"""Represents an Instance resource. @@ -56997,14 +59552,15 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_base_instance_name``. creation_timestamp (str): - Output only. [Output Only] The creation timestamp for this + Output only. The creation timestamp for this managed instance group inRFC3339 text format. This field is a member of `oneof`_ ``_creation_timestamp``. current_actions (google.cloud.compute_v1.types.InstanceGroupManagerActionsSummary): - Output only. [Output Only] The list of instance actions and - the number of instances in this managed instance group that - are scheduled for each of those actions. + Output only. 
The list of instance actions and + the number of instances in this managed instance + group that are scheduled for each of those + actions. This field is a member of `oneof`_ ``_current_actions``. description (str): @@ -57031,8 +59587,9 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_fingerprint``. id (int): - Output only. [Output Only] A unique identifier for this - resource type. The server generates this identifier. + Output only. A unique identifier for this + resource type. The server generates this + identifier. This field is a member of `oneof`_ ``_id``. instance_flexibility_policy (google.cloud.compute_v1.types.InstanceGroupManagerInstanceFlexibilityPolicy): @@ -57043,7 +59600,7 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_instance_flexibility_policy``. instance_group (str): - Output only. [Output Only] The URL of the Instance Group + Output only. The URL of the Instance Group resource. This field is a member of `oneof`_ ``_instance_group``. @@ -57064,9 +59621,9 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_instance_template``. kind (str): - Output only. [Output Only] The resource type, which is - alwayscompute#instanceGroupManager for managed instance - groups. + Output only. The resource type, which is + alwayscompute#instanceGroupManager for managed + instance groups. This field is a member of `oneof`_ ``_kind``. list_managed_instances_results (str): @@ -57097,16 +59654,16 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_resource_policies``. satisfies_pzi (bool): - Output only. [Output Only] Reserved for future use. + Output only. Reserved for future use. This field is a member of `oneof`_ ``_satisfies_pzi``. satisfies_pzs (bool): - Output only. [Output Only] Reserved for future use. + Output only. Reserved for future use. This field is a member of `oneof`_ ``_satisfies_pzs``. self_link (str): - Output only. 
[Output Only] The URL for this managed instance - group. The server defines this URL. + Output only. The URL for this managed + instance group. The server defines this URL. This field is a member of `oneof`_ ``_self_link``. standby_policy (google.cloud.compute_v1.types.InstanceGroupManagerStandbyPolicy): @@ -57120,7 +59677,7 @@ class InstanceGroupManager(proto.Message): This field is a member of `oneof`_ ``_stateful_policy``. status (google.cloud.compute_v1.types.InstanceGroupManagerStatus): - Output only. [Output Only] The status of this managed + Output only. The status of this managed instance group. This field is a member of `oneof`_ ``_status``. @@ -57186,8 +59743,9 @@ class InstanceGroupManager(proto.Message): will be applied to all remaining instances. For more information, read aboutcanary updates. zone (str): - Output only. [Output Only] The URL of azone where the - managed instance group is located (for zonal resources). + Output only. The URL of azone + where the managed instance group is located (for + zonal resources). This field is a member of `oneof`_ ``_zone``. """ @@ -57406,94 +59964,99 @@ class InstanceGroupManagerActionsSummary(proto.Message): Attributes: abandoning (int): - Output only. [Output Only] The total number of instances in - the managed instance group that are scheduled to be - abandoned. Abandoning an instance removes it from the - managed instance group without deleting it. + Output only. The total number of instances in + the managed instance group that are scheduled to + be abandoned. Abandoning an instance removes it + from the managed instance group without deleting + it. This field is a member of `oneof`_ ``_abandoning``. creating (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be created or - are currently being created. If the group fails to create - any of these instances, it tries again until it creates the - instance successfully. + Output only. 
The number of instances in the + managed instance group that are scheduled to be + created or are currently being created. If the + group fails to create any of these instances, it + tries again until it creates the instance + successfully. - If you have disabled creation retries, this field will not - be populated; instead, the creatingWithoutRetries field will - be populated. + If you have disabled creation retries, this + field will not be populated; instead, the + creatingWithoutRetries field will be populated. This field is a member of `oneof`_ ``_creating``. creating_without_retries (int): - Output only. [Output Only] The number of instances that the - managed instance group will attempt to create. The group - attempts to create each instance only once. If the group - fails to create any of these instances, it decreases the - group's targetSize value accordingly. + Output only. The number of instances that the + managed instance group will attempt to create. + The group attempts to create each instance only + once. If the group fails to create any of these + instances, it decreases the group's targetSize + value accordingly. This field is a member of `oneof`_ ``_creating_without_retries``. deleting (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be deleted or - are currently being deleted. + Output only. The number of instances in the + managed instance group that are scheduled to be + deleted or are currently being deleted. This field is a member of `oneof`_ ``_deleting``. none (int): - Output only. [Output Only] The number of instances in the - managed instance group that are running and have no - scheduled actions. + Output only. The number of instances in the + managed instance group that are running and have + no scheduled actions. This field is a member of `oneof`_ ``_none``. recreating (int): - Output only. 
[Output Only] The number of instances in the - managed instance group that are scheduled to be recreated or - are currently being being recreated. Recreating an instance - deletes the existing root persistent disk and creates a new - disk from the image that is defined in the instance - template. + Output only. The number of instances in the + managed instance group that are scheduled to be + recreated or are currently being being + recreated. Recreating an instance deletes the + existing root persistent disk and creates a new + disk from the image that is defined in the + instance template. This field is a member of `oneof`_ ``_recreating``. refreshing (int): - Output only. [Output Only] The number of instances in the - managed instance group that are being reconfigured with - properties that do not require a restart or a recreate - action. For example, setting or removing target pools for - the instance. + Output only. The number of instances in the + managed instance group that are being + reconfigured with properties that do not require + a restart or a recreate action. For example, + setting or removing target pools for the + instance. This field is a member of `oneof`_ ``_refreshing``. restarting (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be restarted or - are currently being restarted. + Output only. The number of instances in the + managed instance group that are scheduled to be + restarted or are currently being restarted. This field is a member of `oneof`_ ``_restarting``. resuming (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be resumed or - are currently being resumed. + Output only. The number of instances in the + managed instance group that are scheduled to be + resumed or are currently being resumed. This field is a member of `oneof`_ ``_resuming``. starting (int): - Output only. 
[Output Only] The number of instances in the - managed instance group that are scheduled to be started or - are currently being started. + Output only. The number of instances in the + managed instance group that are scheduled to be + started or are currently being started. This field is a member of `oneof`_ ``_starting``. stopping (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be stopped or - are currently being stopped. + Output only. The number of instances in the + managed instance group that are scheduled to be + stopped or are currently being stopped. This field is a member of `oneof`_ ``_stopping``. suspending (int): - Output only. [Output Only] The number of instances in the - managed instance group that are scheduled to be suspended or - are currently being suspended. + Output only. The number of instances in the + managed instance group that are scheduled to be + suspended or are currently being suspended. This field is a member of `oneof`_ ``_suspending``. verifying (int): - Output only. [Output Only] The number of instances in the - managed instance group that are being verified. See the + Output only. The number of instances in the managed instance + group that are being verified. See the managedInstances[].currentAction property in the listManagedInstances method documentation. @@ -58032,7 +60595,7 @@ class InstanceGroupManagerResizeRequest(proto.Message): Attributes: creation_timestamp (str): - Output only. [Output Only] The creation timestamp for this + Output only. The creation timestamp for this resize request inRFC3339 text format. This field is a member of `oneof`_ ``_creation_timestamp``. @@ -58041,14 +60604,15 @@ class InstanceGroupManagerResizeRequest(proto.Message): This field is a member of `oneof`_ ``_description``. id (int): - Output only. [Output Only] A unique identifier for this - resource type. The server generates this identifier. + Output only. 
A unique identifier for this + resource type. The server generates this + identifier. This field is a member of `oneof`_ ``_id``. kind (str): - Output only. [Output Only] The resource type, which is - alwayscompute#instanceGroupManagerResizeRequest for resize - requests. + Output only. The resource type, which is + alwayscompute#instanceGroupManagerResizeRequest + for resize requests. This field is a member of `oneof`_ ``_kind``. name (str): @@ -58057,6 +60621,12 @@ class InstanceGroupManagerResizeRequest(proto.Message): withRFC1035. This field is a member of `oneof`_ ``_name``. + region (str): + Output only. The URL of a region + where the resize request is located. Populated + only for regional resize requests. + + This field is a member of `oneof`_ ``_region``. requested_run_duration (google.cloud.compute_v1.types.Duration): Requested run duration for instances that will be created by this request. At the end of @@ -58071,34 +60641,35 @@ class InstanceGroupManagerResizeRequest(proto.Message): This field is a member of `oneof`_ ``_resize_by``. self_link (str): - Output only. [Output Only] The URL for this resize request. + Output only. The URL for this resize request. The server defines this URL. This field is a member of `oneof`_ ``_self_link``. self_link_with_id (str): - Output only. [Output Only] Server-defined URL for this + Output only. Server-defined URL for this resource with the resource id. This field is a member of `oneof`_ ``_self_link_with_id``. state (str): - Output only. [Output only] Current state of the request. - Check the State enum for the list of possible values. + Output only. Current state of the request. + Check the State enum for the list of possible + values. This field is a member of `oneof`_ ``_state``. status (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequestStatus): - Output only. [Output only] Status of the request. + Output only. Status of the request. This field is a member of `oneof`_ ``_status``. 
zone (str): - Output only. [Output Only] The URL of azone where the resize - request is located. Populated only for zonal resize - requests. + Output only. The URL of a zone + where the resize request is located. Populated + only for zonal resize requests. This field is a member of `oneof`_ ``_zone``. """ class State(proto.Enum): - r"""Output only. [Output only] Current state of the request. + r"""Output only. Current state of the request. Values: UNDEFINED_STATE (0): @@ -58159,6 +60730,11 @@ class State(proto.Enum): number=3373707, optional=True, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) requested_run_duration: "Duration" = proto.Field( proto.MESSAGE, number=232146425, @@ -58205,25 +60781,26 @@ class InstanceGroupManagerResizeRequestStatus(proto.Message): Attributes: error (google.cloud.compute_v1.types.Error): - Output only. [Output only] Fatal errors encountered during - the queueing or provisioning phases of the ResizeRequest - that caused the transition to the FAILED state. Contrary to - the last_attempt errors, this field is final and errors are - never removed from here, as the ResizeRequest is not going - to retry. + Output only. Fatal errors encountered during the queueing or + provisioning phases of the ResizeRequest that caused the + transition to the FAILED state. Contrary to the last_attempt + errors, this field is final and errors are never removed + from here, as the ResizeRequest is not going to retry. This field is a member of `oneof`_ ``_error``. last_attempt (google.cloud.compute_v1.types.InstanceGroupManagerResizeRequestStatusLastAttempt): - Output only. [Output only] Information about the last - attempt to fulfill the request. The value is temporary since - the ResizeRequest can retry, as long as it's still active - and the last attempt value can either be cleared or replaced - with a different error. 
Since ResizeRequest retries - infrequently, the value may be stale and no longer show an - active problem. The value is cleared when ResizeRequest - transitions to the final state (becomes inactive). If the - final state is FAILED the error describing it will be - storred in the "error" field only. + Output only. Information about the last + attempt to fulfill the request. The value is + temporary since the ResizeRequest can retry, as + long as it's still active and the last attempt + value can either be cleared or replaced with a + different error. Since ResizeRequest retries + infrequently, the value may be stale and no + longer show an active problem. The value is + cleared when ResizeRequest transitions to the + final state (becomes inactive). If the final + state is FAILED the error describing it will be + stored in the "error" field only. This field is a member of `oneof`_ ``_last_attempt``. """ @@ -58435,43 +61012,53 @@ class InstanceGroupManagerStatus(proto.Message): Attributes: all_instances_config (google.cloud.compute_v1.types.InstanceGroupManagerStatusAllInstancesConfig): - Output only. [Output only] Status of all-instances + Output only. Status of all-instances configuration on the group. This field is a member of `oneof`_ ``_all_instances_config``. applied_accelerator_topologies (MutableSequence[google.cloud.compute_v1.types.InstanceGroupManagerStatusAcceleratorTopology]): - Output only. [Output Only] The accelerator topology applied - to this MIG. Currently only one accelerator topology is - supported. + Output only. The accelerator topology applied + to this MIG. Currently only one accelerator + topology is supported. autoscaler (str): - Output only. [Output Only] The URL of theAutoscaler that - targets this instance group manager. + Output only. The URL of theAutoscaler + that targets this instance group manager. This field is a member of `oneof`_ ``_autoscaler``. 
bulk_instance_operation (google.cloud.compute_v1.types.InstanceGroupManagerStatusBulkInstanceOperation): - Output only. [Output Only] The status of bulk instance + Output only. The status of bulk instance operation. This field is a member of `oneof`_ ``_bulk_instance_operation``. + current_instance_statuses (google.cloud.compute_v1.types.InstanceGroupManagerStatusInstanceStatusSummary): + Output only. The list of instance statuses + and the number of instances in this managed + instance group that have the status. Currently + only shown for TPU MIGs + + This field is a member of `oneof`_ ``_current_instance_statuses``. is_stable (bool): - Output only. [Output Only] A bit indicating whether the - managed instance group is in a stable state. A stable state - means that: none of the instances in the managed instance - group is currently undergoing any type of change (for - example, creation, restart, or deletion); no future changes - are scheduled for instances in the managed instance group; - and the managed instance group itself is not being modified. + Output only. A bit indicating whether the + managed instance group is in a stable state. A + stable state means that: none of the instances + in the managed instance group is currently + undergoing any type of change (for example, + creation, restart, or deletion); no future + changes are scheduled for instances in the + managed instance group; and the managed instance + group itself is not being modified. This field is a member of `oneof`_ ``_is_stable``. stateful (google.cloud.compute_v1.types.InstanceGroupManagerStatusStateful): - Output only. [Output Only] Stateful status of the given + Output only. Stateful status of the given Instance Group Manager. This field is a member of `oneof`_ ``_stateful``. version_target (google.cloud.compute_v1.types.InstanceGroupManagerStatusVersionTarget): - Output only. 
[Output Only] A status of consistency of - Instances' versions with their target version specified by - version field on Instance Group Manager. + Output only. A status of consistency of + Instances' versions with their target version + specified by version field on Instance Group + Manager. This field is a member of `oneof`_ ``_version_target``. """ @@ -58502,6 +61089,14 @@ class InstanceGroupManagerStatus(proto.Message): message="InstanceGroupManagerStatusBulkInstanceOperation", ) ) + current_instance_statuses: "InstanceGroupManagerStatusInstanceStatusSummary" = ( + proto.Field( + proto.MESSAGE, + number=269432644, + optional=True, + message="InstanceGroupManagerStatusInstanceStatusSummary", + ) + ) is_stable: bool = proto.Field( proto.BOOL, number=108410864, @@ -58528,26 +61123,26 @@ class InstanceGroupManagerStatusAcceleratorTopology(proto.Message): Attributes: accelerator_topology (str): - Output only. [Output Only] Topology in the format of: - "16x16", "4x4x4", etc. The value is the same as configured - in the WorkloadPolicy. + Output only. Topology in the format of: + "16x16", "4x4x4", etc. The value is the same as + configured in the WorkloadPolicy. This field is a member of `oneof`_ ``_accelerator_topology``. state (str): - Output only. [Output Only] The state of the accelerator - topology. Check the State enum for the list of possible - values. + Output only. The state of the accelerator + topology. Check the State enum for the list of + possible values. This field is a member of `oneof`_ ``_state``. state_details (google.cloud.compute_v1.types.InstanceGroupManagerStatusAcceleratorTopologyAcceleratorTopologyStateDetails): - Output only. [Output Only] The result of the latest + Output only. The result of the latest accelerator topology state check. This field is a member of `oneof`_ ``_state_details``. """ class State(proto.Enum): - r"""Output only. [Output Only] The state of the accelerator topology. + r"""Output only. 
The state of the accelerator topology. Values: UNDEFINED_STATE (0): @@ -58606,12 +61201,13 @@ class InstanceGroupManagerStatusAcceleratorTopologyAcceleratorTopologyStateDetai Attributes: error (google.cloud.compute_v1.types.Error): - Output only. [Output Only] Encountered errors. + Output only. Encountered errors. This field is a member of `oneof`_ ``_error``. timestamp (str): - Output only. [Output Only] Timestamp is shown only if there - is an error. The field has // RFC3339 // text format. + Output only. Timestamp is shown only if there + is an error. The field has // RFC3339 // + text format. This field is a member of `oneof`_ ``_timestamp``. """ @@ -58636,15 +61232,15 @@ class InstanceGroupManagerStatusAllInstancesConfig(proto.Message): Attributes: current_revision (str): - Output only. [Output Only] Current all-instances - configuration revision. This value is in RFC3339 text - format. + Output only. Current all-instances + configuration revision. This value is in RFC3339 + text format. This field is a member of `oneof`_ ``_current_revision``. effective (bool): - Output only. [Output Only] A bit indicating whether this - configuration has been applied to all managed instances in - the group. + Output only. A bit indicating whether this + configuration has been applied to all managed + instances in the group. This field is a member of `oneof`_ ``_effective``. """ @@ -58670,12 +61266,12 @@ class InstanceGroupManagerStatusBulkInstanceOperation(proto.Message): Attributes: in_progress (bool): - Output only. [Output Only] Informs whether bulk instance + Output only. Informs whether bulk instance operation is in progress. This field is a member of `oneof`_ ``_in_progress``. last_progress_check (google.cloud.compute_v1.types.InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck): - Output only. [Output Only] Information from the last + Output only. Information from the last progress check of bulk instance operation. 
This field is a member of `oneof`_ ``_last_progress_check``. @@ -58701,14 +61297,14 @@ class InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck(proto.Mes Attributes: error (google.cloud.compute_v1.types.Error): - Output only. [Output Only] Errors encountered during bulk + Output only. Errors encountered during bulk instance operation. This field is a member of `oneof`_ ``_error``. timestamp (str): - Output only. [Output Only] Timestamp of the last progress - check of bulk instance operation. Timestamp is in RFC3339 - text format. + Output only. Timestamp of the last progress + check of bulk instance operation. Timestamp is + in RFC3339 text format. This field is a member of `oneof`_ ``_timestamp``. """ @@ -58726,6 +61322,167 @@ class InstanceGroupManagerStatusBulkInstanceOperationLastProgressCheck(proto.Mes ) +class InstanceGroupManagerStatusInstanceStatusSummary(proto.Message): + r"""The list of instance statuses and the number of instances in + this managed instance group that have the status. For more + information about how to interpret each status check the + instance lifecycle documentation. Currently only shown for TPU + MIGs. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + deprovisioning (int): + Output only. The number of instances in the + managed instance group that have DEPROVISIONING + status. + + This field is a member of `oneof`_ ``_deprovisioning``. + non_existent (int): + Output only. The number of instances that + have not been created yet or have been deleted. + Includes only instances that would be shown in + the listManagedInstances method and not all + instances that have been deleted in the lifetime + of the MIG. + Does not include FlexStart instances that are + waiting for the resources availability, they are + considered as 'pending'. + + This field is a member of `oneof`_ ``_non_existent``. + pending (int): + Output only. 
The number of instances in the managed instance + group that have PENDING status, that is FlexStart instances + that are waiting for resources. Instances that do not exist + because of the other reasons are counted as 'non_existent'. + + This field is a member of `oneof`_ ``_pending``. + pending_stop (int): + Output only. The number of instances in the managed instance + group that have PENDING_STOP status. + + This field is a member of `oneof`_ ``_pending_stop``. + provisioning (int): + Output only. The number of instances in the + managed instance group that have PROVISIONING + status. + + This field is a member of `oneof`_ ``_provisioning``. + repairing (int): + Output only. The number of instances in the + managed instance group that have REPAIRING + status. + + This field is a member of `oneof`_ ``_repairing``. + running (int): + Output only. The number of instances in the + managed instance group that have RUNNING status. + + This field is a member of `oneof`_ ``_running``. + staging (int): + Output only. The number of instances in the + managed instance group that have STAGING status. + + This field is a member of `oneof`_ ``_staging``. + stopped (int): + Output only. The number of instances in the + managed instance group that have STOPPED status. + + This field is a member of `oneof`_ ``_stopped``. + stopping (int): + Output only. The number of instances in the + managed instance group that have STOPPING + status. + + This field is a member of `oneof`_ ``_stopping``. + suspended (int): + Output only. The number of instances in the + managed instance group that have SUSPENDED + status. + + This field is a member of `oneof`_ ``_suspended``. + suspending (int): + Output only. The number of instances in the + managed instance group that have SUSPENDING + status. + + This field is a member of `oneof`_ ``_suspending``. + terminated (int): + Output only. The number of instances in the + managed instance group that have TERMINATED + status. 
+ + This field is a member of `oneof`_ ``_terminated``. + """ + + deprovisioning: int = proto.Field( + proto.INT32, + number=462169582, + optional=True, + ) + non_existent: int = proto.Field( + proto.INT32, + number=468485062, + optional=True, + ) + pending: int = proto.Field( + proto.INT32, + number=391154071, + optional=True, + ) + pending_stop: int = proto.Field( + proto.INT32, + number=163159466, + optional=True, + ) + provisioning: int = proto.Field( + proto.INT32, + number=121098989, + optional=True, + ) + repairing: int = proto.Field( + proto.INT32, + number=311243061, + optional=True, + ) + running: int = proto.Field( + proto.INT32, + number=477042111, + optional=True, + ) + staging: int = proto.Field( + proto.INT32, + number=249960507, + optional=True, + ) + stopped: int = proto.Field( + proto.INT32, + number=263164365, + optional=True, + ) + stopping: int = proto.Field( + proto.INT32, + number=105035892, + optional=True, + ) + suspended: int = proto.Field( + proto.INT32, + number=485854683, + optional=True, + ) + suspending: int = proto.Field( + proto.INT32, + number=29113894, + optional=True, + ) + terminated: int = proto.Field( + proto.INT32, + number=301796899, + optional=True, + ) + + class InstanceGroupManagerStatusStateful(proto.Message): r""" @@ -58733,17 +61490,19 @@ class InstanceGroupManagerStatusStateful(proto.Message): Attributes: has_stateful_config (bool): - Output only. [Output Only] A bit indicating whether the - managed instance group has stateful configuration, that is, - if you have configured any items in a stateful policy or in - per-instance configs. The group might report that it has no - stateful configuration even when there is still some - preserved state on a managed instance, for example, if you - have deleted all PICs but not yet applied those deletions. + Output only. 
A bit indicating whether the + managed instance group has stateful + configuration, that is, if you have configured + any items in a stateful policy or in + per-instance configs. The group might report + that it has no stateful configuration even when + there is still some preserved state on a managed + instance, for example, if you have deleted all + PICs but not yet applied those deletions. This field is a member of `oneof`_ ``_has_stateful_config``. per_instance_configs (google.cloud.compute_v1.types.InstanceGroupManagerStatusStatefulPerInstanceConfigs): - Output only. [Output Only] Status of per-instance + Output only. Status of per-instance configurations on the instances. This field is a member of `oneof`_ ``_per_instance_configs``. @@ -58794,11 +61553,11 @@ class InstanceGroupManagerStatusVersionTarget(proto.Message): Attributes: is_reached (bool): - Output only. [Output Only] A bit indicating whether version - target has been reached in this managed instance group, i.e. - all instances are in their target version. Instances' target - version are specified byversion field on Instance Group - Manager. + Output only. A bit indicating whether version + target has been reached in this managed instance + group, i.e. all instances are in their target + version. Instances' target version are specified + byversion field on Instance Group Manager. This field is a member of `oneof`_ ``_is_reached``. """ @@ -61993,6 +64752,21 @@ class InstantSnapshot(proto.Message): current or a previous instance of a given disk name. This field is a member of `oneof`_ ``_source_disk_id``. + source_instant_snapshot_group (str): + Output only. [Output Only] URL of the source instant + snapshot this instant snapshot is part of. Note that the + source instant snapshot group must be in the same + zone/region as the instant snapshot to be created. This can + be a full or valid partial URL. + + This field is a member of `oneof`_ ``_source_instant_snapshot_group``. 
+ source_instant_snapshot_group_id (str): + Output only. [Output Only] The ID value of the source + instant snapshot group this InstantSnapshot is part of. This + value may be used to determine whether the InstantSnapshot + was created as part of an InstantSnapshotGroup creation. + + This field is a member of `oneof`_ ``_source_instant_snapshot_group_id``. status (str): Output only. [Output Only] The status of the instantSnapshot. This can beCREATING, DELETING, FAILED, @@ -62152,6 +64926,16 @@ class Status(proto.Enum): number=454190809, optional=True, ) + source_instant_snapshot_group: str = proto.Field( + proto.STRING, + number=525991686, + optional=True, + ) + source_instant_snapshot_group_id: str = proto.Field( + proto.STRING, + number=167103732, + optional=True, + ) status: str = proto.Field( proto.STRING, number=181260274, @@ -62248,6 +65032,263 @@ def raw_page(self): ) +class InstantSnapshotGroup(proto.Message): + r"""Represents an InstantSnapshotGroup resource. + + An instant snapshot group is a set of instant snapshots that + represents a point in time state of a consistency group. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + Output only. [Output Only] Creation timestamp inRFC3339 text + format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + Optional. An optional description of this + resource. Provide this property when you create + the resource. + + This field is a member of `oneof`_ ``_description``. + id (int): + Output only. [Output Only] The unique identifier for the + resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + kind (str): + Output only. [Output Only] Type of the resource. + Alwayscompute#instantSnapshotGroup for InstantSnapshotGroup + resources. + + This field is a member of `oneof`_ ``_kind``. + name (str): + Identifier. 
Name of the resource; provided by the client + when the resource is created. The name must be 1-63 + characters long, and comply withRFC1035. Specifically, the + name must be 1-63 characters long and match the regular + expression ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the + first character must be a lowercase letter, and all + following characters must be a dash, lowercase letter, or + digit, except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + region (str): + Output only. [Output Only] URL of the region where the + instant snapshot group resides. You must specify this field + as part of the HTTP request URL. It is not settable as a + field in the request body. + + This field is a member of `oneof`_ ``_region``. + resource_status (google.cloud.compute_v1.types.InstantSnapshotGroupResourceStatus): + + This field is a member of `oneof`_ ``_resource_status``. + self_link (str): + Output only. [Output Only] Server-defined URL for the + resource. + + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + Output only. [Output Only] Server-defined URL for this + resource's resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. + source_consistency_group (str): + + This field is a member of `oneof`_ ``_source_consistency_group``. + status (str): + Output only. [Output Only] Check the Status enum for the + list of possible values. + + This field is a member of `oneof`_ ``_status``. + zone (str): + Output only. [Output Only] URL of the zone where the instant + snapshot group resides. You must specify this field as part + of the HTTP request URL. It is not settable as a field in + the request body. + + This field is a member of `oneof`_ ``_zone``. + """ + + class Status(proto.Enum): + r"""Output only. [Output Only] + + Values: + UNDEFINED_STATUS (0): + A value indicating that the enum field is not + set. + CREATING (455564985): + No description available. 
+ DELETING (528602024): + No description available. + FAILED (455706685): + No description available. + INVALID (530283991): + No description available. + READY (77848963): + No description available. + UNKNOWN (433141802): + No description available. + """ + + UNDEFINED_STATUS = 0 + CREATING = 455564985 + DELETING = 528602024 + FAILED = 455706685 + INVALID = 530283991 + READY = 77848963 + UNKNOWN = 433141802 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + resource_status: "InstantSnapshotGroupResourceStatus" = proto.Field( + proto.MESSAGE, + number=249429315, + optional=True, + message="InstantSnapshotGroupResourceStatus", + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + source_consistency_group: str = proto.Field( + proto.STRING, + number=531359348, + optional=True, + ) + status: str = proto.Field( + proto.STRING, + number=181260274, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + optional=True, + ) + + +class InstantSnapshotGroupParameters(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + source_instant_snapshot_group (str): + The source instant snapshot group used to + create disks. You can provide this as a partial + or full URL to the resource. 
For example, the + following are valid values: + + + - + https://www.googleapis.com/compute/v1/projects/project/zones/zone/instantSnapshotGroups/instantSnapshotGroup + - + projects/project/zones/zone/instantSnapshotGroups/instantSnapshotGroup + - + zones/zone/instantSnapshotGroups/instantSnapshotGroup + + This field is a member of `oneof`_ ``_source_instant_snapshot_group``. + """ + + source_instant_snapshot_group: str = proto.Field( + proto.STRING, + number=525991686, + optional=True, + ) + + +class InstantSnapshotGroupResourceStatus(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consistency_membership_resolution_time (str): + Output only. [Output Only] + + This field is a member of `oneof`_ ``_consistency_membership_resolution_time``. + source_info (google.cloud.compute_v1.types.InstantSnapshotGroupSourceInfo): + Output only. [Output Only] + + This field is a member of `oneof`_ ``_source_info``. + """ + + consistency_membership_resolution_time: str = proto.Field( + proto.STRING, + number=346675870, + optional=True, + ) + source_info: "InstantSnapshotGroupSourceInfo" = proto.Field( + proto.MESSAGE, + number=451907154, + optional=True, + message="InstantSnapshotGroupSourceInfo", + ) + + +class InstantSnapshotGroupSourceInfo(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + consistency_group (str): + + This field is a member of `oneof`_ ``_consistency_group``. + consistency_group_id (str): + + This field is a member of `oneof`_ ``_consistency_group_id``. + """ + + consistency_group: str = proto.Field( + proto.STRING, + number=508183960, + optional=True, + ) + consistency_group_id: str = proto.Field( + proto.STRING, + number=85599522, + optional=True, + ) + + class InstantSnapshotList(proto.Message): r"""Contains a list of InstantSnapshot resources. 
@@ -69826,8 +72867,1031 @@ class ListAcceleratorTypesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. + zone (str): + The name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListAddressesRequest(proto.Message): + r"""A request message for Addresses.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. 
+ + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. 
Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListAssociationsFirewallPolicyRequest(proto.Message): + r"""A request message for FirewallPolicies.ListAssociations. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + include_inherited_policies (bool): + If set to "true", the response will contain a + list of all associations for the containing + folders and the containing organization of the + target. The parameter has no effect if the + target is an organization. + + This field is a member of `oneof`_ ``_include_inherited_policies``. + target_resource (str): + The target resource to list associations. It + is an organization, or a folder. + + This field is a member of `oneof`_ ``_target_resource``. + """ + + include_inherited_policies: bool = proto.Field( + proto.BOOL, + number=32280076, + optional=True, + ) + target_resource: str = proto.Field( + proto.STRING, + number=467318524, + optional=True, + ) + + +class ListAssociationsOrganizationSecurityPolicyRequest(proto.Message): + r"""A request message for + OrganizationSecurityPolicies.ListAssociations. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + target_resource (str): + The target resource to list associations. 
It + is an organization, or a folder. + + This field is a member of `oneof`_ ``_target_resource``. + """ + + target_resource: str = proto.Field( + proto.STRING, + number=467318524, + optional=True, + ) + + +class ListAutoscalersRequest(proto.Message): + r"""A request message for Autoscalers.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. 
+ However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. 
+ + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone for this request. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListAvailableFeaturesRegionSslPoliciesRequest(proto.Message): + r"""A request message for + RegionSslPolicies.ListAvailableFeatures. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. 
+ return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListAvailableFeaturesSslPoliciesRequest(proto.Message): + r"""A request message for SslPolicies.ListAvailableFeatures. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. 
+ + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListBackendBucketsRequest(proto.Message): + r"""A request message for BackendBuckets.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. 
You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
+ + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListBackendServicesRequest(proto.Message): + r"""A request message for BackendServices.List. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. 
+ page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. """ filter: str = proto.Field( @@ -69859,20 +73923,28 @@ class ListAcceleratorTypesRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListAddressesRequest(proto.Message): - r"""A request message for Addresses.List. See the method +class ListBgpRoutesRoutersRequest(proto.Message): + r"""A request message for Routers.ListBgpRoutes. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + address_family (str): + (Required) limit results to this address + family (either IPv4 or IPv6) Check the + AddressFamily enum for the list of possible + values. + + This field is a member of `oneof`_ ``_address_family``. + destination_prefix (str): + Limit results to destinations that are + subnets of this CIDR range + + This field is a member of `oneof`_ ``_destination_prefix``. filter (str): A filter expression that filters resources listed in the response. Most Compute resources support two types of filter @@ -69976,6 +74048,16 @@ class ListAddressesRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. + peer (str): + (Required) limit results to the BGP peer with + the given name. 
Name should conform to RFC1035. + + This field is a member of `oneof`_ ``_peer``. + policy_applied (bool): + When true, the method returns post-policy + routes. Otherwise, it returns pre-policy routes. + + This field is a member of `oneof`_ ``_policy_applied``. project (str): Project ID for this request. region (str): @@ -69991,8 +74073,70 @@ class ListAddressesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + route_type (str): + (Required) limit results to this type of + route (either LEARNED or ADVERTISED) + Check the RouteType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_route_type``. + router (str): + Name or id of the resource for this request. + Name should conform to RFC1035. """ + class AddressFamily(proto.Enum): + r"""(Required) limit results to this address family (either IPv4 + or IPv6) + + Values: + UNDEFINED_ADDRESS_FAMILY (0): + A value indicating that the enum field is not + set. + IPV4 (2254341): + No description available. + IPV6 (2254343): + No description available. + UNSPECIFIED_IP_VERSION (72938440): + No description available. + """ + + UNDEFINED_ADDRESS_FAMILY = 0 + IPV4 = 2254341 + IPV6 = 2254343 + UNSPECIFIED_IP_VERSION = 72938440 + + class RouteType(proto.Enum): + r"""(Required) limit results to this type of route (either + LEARNED or ADVERTISED) + + Values: + UNDEFINED_ROUTE_TYPE (0): + A value indicating that the enum field is not + set. + ADVERTISED (20302109): + No description available. + LEARNED (231892419): + No description available. + UNSPECIFIED_ROUTE_TYPE (248064440): + No description available. 
+ """ + + UNDEFINED_ROUTE_TYPE = 0 + ADVERTISED = 20302109 + LEARNED = 231892419 + UNSPECIFIED_ROUTE_TYPE = 248064440 + + address_family: str = proto.Field( + proto.STRING, + number=173744655, + optional=True, + ) + destination_prefix: str = proto.Field( + proto.STRING, + number=263872483, + optional=True, + ) filter: str = proto.Field( proto.STRING, number=336120696, @@ -70013,6 +74157,16 @@ class ListAddressesRequest(proto.Message): number=19994697, optional=True, ) + peer: str = proto.Field( + proto.STRING, + number=3436898, + optional=True, + ) + policy_applied: bool = proto.Field( + proto.BOOL, + number=379464304, + optional=True, + ) project: str = proto.Field( proto.STRING, number=227560217, @@ -70026,68 +74180,176 @@ class ListAddressesRequest(proto.Message): number=517198390, optional=True, ) + route_type: str = proto.Field( + proto.STRING, + number=375888752, + optional=True, + ) + router: str = proto.Field( + proto.STRING, + number=148608841, + ) -class ListAssociationsFirewallPolicyRequest(proto.Message): - r"""A request message for FirewallPolicies.ListAssociations. See - the method description for details. +class ListCrossSiteNetworksRequest(proto.Message): + r"""A request message for CrossSiteNetworks.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - include_inherited_policies (bool): - If set to "true", the response will contain a - list of all associations for the containing - folders and the containing organization of the - target. The parameter has no effect if the - target is an organization. + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. 
- This field is a member of `oneof`_ ``_include_inherited_policies``. - target_resource (str): - The target resource to list associations. It - is an organization, or a folder. + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. - This field is a member of `oneof`_ ``_target_resource``. - """ + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. - include_inherited_policies: bool = proto.Field( - proto.BOOL, - number=32280076, - optional=True, - ) - target_resource: str = proto.Field( - proto.STRING, - number=467318524, - optional=True, - ) + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + :: -class ListAssociationsOrganizationSecurityPolicyRequest(proto.Message): - r"""A request message for - OrganizationSecurityPolicies.ListAssociations. See the method - description for details. + labels.owner:* + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + To filter on multiple expressions, provide each separate + expression within parentheses. For example: - Attributes: - target_resource (str): - The target resource to list associations. It - is an organization, or a folder. + :: - This field is a member of `oneof`_ ``_target_resource``. 
+ (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). 
Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. """ - target_resource: str = proto.Field( + filter: str = proto.Field( proto.STRING, - number=467318524, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, optional=True, ) -class ListAutoscalersRequest(proto.Message): - r"""A request message for Autoscalers.List. See the method +class ListDiskTypesRequest(proto.Message): + r"""A request message for DiskTypes.List. See the method description for details. @@ -70211,7 +74473,7 @@ class ListAutoscalersRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - Name of the zone for this request. + The name of the zone for this request. 
""" filter: str = proto.Field( @@ -70249,10 +74511,9 @@ class ListAutoscalersRequest(proto.Message): ) -class ListAvailableFeaturesRegionSslPoliciesRequest(proto.Message): - r"""A request message for - RegionSslPolicies.ListAvailableFeatures. See the method - description for details. +class ListDisksRequest(proto.Message): + r"""A request message for Disks.List. See the method description + for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -70363,8 +74624,6 @@ class ListAvailableFeaturesRegionSslPoliciesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -70376,6 +74635,8 @@ class ListAvailableFeaturesRegionSslPoliciesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. """ filter: str = proto.Field( @@ -70402,20 +74663,20 @@ class ListAvailableFeaturesRegionSslPoliciesRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListAvailableFeaturesSslPoliciesRequest(proto.Message): - r"""A request message for SslPolicies.ListAvailableFeatures. See - the method description for details. +class ListDisksStoragePoolsRequest(proto.Message): + r"""A request message for StoragePools.ListDisks. See the method + description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -70537,6 +74798,10 @@ class ListAvailableFeaturesSslPoliciesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + storage_pool (str): + Name of the storage pool to list disks of. + zone (str): + The name of the zone for this request. """ filter: str = proto.Field( @@ -70568,168 +74833,19 @@ class ListAvailableFeaturesSslPoliciesRequest(proto.Message): number=517198390, optional=True, ) - - -class ListBackendBucketsRequest(proto.Message): - r"""A request message for BackendBuckets.List. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. These two types of filter expressions cannot be - mixed in one request. - - If you want to use AIP-160, your expression must specify the - field name, an operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. The operator must be either ``=``, ``!=``, ``>``, - ``<``, ``<=``, ``>=`` or ``:``. - - For example, if you are filtering Compute Engine instances, - you can exclude instances named ``example-instance`` by - specifying ``name != example-instance``. - - The ``:*`` comparison can be used to test whether a key has - been defined. For example, to find all objects with - ``owner`` label use: - - :: - - labels.owner:* - - You can also filter nested fields. For example, you could - specify ``scheduling.automaticRestart = false`` to include - instances only if they are not scheduled for automatic - restarts. 
You can use filtering on nested fields to filter - based onresource labels. - - To filter on multiple expressions, provide each separate - expression within parentheses. For example: - - :: - - (scheduling.automaticRestart = true) - (cpuPlatform = "Intel Skylake") - - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - - :: - - (cpuPlatform = "Intel Skylake") OR - (cpuPlatform = "Intel Broadwell") AND - (scheduling.automaticRestart = true) - - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` - - The literal value is interpreted as a regular expression - using GoogleRE2 library syntax. The literal value must match - the entire field. - - For example, to filter for instances that do not end with - name "instance", you would use ``name ne .*instance``. - - You cannot combine constraints on multiple fields using - regular expressions. - - This field is a member of `oneof`_ ``_filter``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. 
- - You can also sort results in descending order based on the - creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. - - Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - For example, when partial success behavior is - enabled, aggregatedList for a single zone scope - either returns all resources in the zone or no - resources, with an error code. - - This field is a member of `oneof`_ ``_return_partial_success``. - """ - - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( + storage_pool: str = proto.Field( proto.STRING, - number=19994697, - optional=True, + number=360473440, ) - project: str = proto.Field( + zone: str = proto.Field( proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, + number=3744684, ) -class ListBackendServicesRequest(proto.Message): - r"""A request message for BackendServices.List. See the method - description for details. 
+class ListErrorsInstanceGroupManagersRequest(proto.Message): + r"""A request message for InstanceGroupManagers.ListErrors. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -70806,6 +74922,11 @@ class ListBackendServicesRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It must be a string + that meets the requirements in RFC1035, or an unsigned long + integer: must match regexp pattern: + (?:`a-z `__?)|1-9{0,19}. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -70851,6 +74972,10 @@ class ListBackendServicesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of thezone where the managed + instance group is located. + It should conform to RFC1035. """ filter: str = proto.Field( @@ -70858,6 +74983,10 @@ class ListBackendServicesRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -70882,28 +75011,20 @@ class ListBackendServicesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListBgpRoutesRoutersRequest(proto.Message): - r"""A request message for Routers.ListBgpRoutes. See the method - description for details. +class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.ListErrors. + See the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - address_family (str): - (Required) limit results to this address - family (either IPv4 or IPv6) Check the - AddressFamily enum for the list of possible - values. - - This field is a member of `oneof`_ ``_address_family``. - destination_prefix (str): - Limit results to destinations that are - subnets of this CIDR range - - This field is a member of `oneof`_ ``_destination_prefix``. filter (str): A filter expression that filters resources listed in the response. Most Compute resources support two types of filter @@ -70975,6 +75096,11 @@ class ListBgpRoutesRoutersRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It must be a string + that meets the requirements in RFC1035, or an unsigned long + integer: must match regexp pattern: + (?:`a-z `__?)|1-9{0,19}. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -71007,20 +75133,11 @@ class ListBgpRoutesRoutersRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - peer (str): - (Required) limit results to the BGP peer with - the given name. Name should conform to RFC1035. - - This field is a member of `oneof`_ ``_peer``. - policy_applied (bool): - When true, the method returns post-policy - routes. Otherwise, it returns pre-policy routes. - - This field is a member of `oneof`_ ``_policy_applied``. project (str): Project ID for this request. region (str): - Name of the region for this request. + Name of the region scoping this request. + This should conform to RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -71032,75 +75149,17 @@ class ListBgpRoutesRoutersRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - route_type (str): - (Required) limit results to this type of - route (either LEARNED or ADVERTISED) - Check the RouteType enum for the list of - possible values. - - This field is a member of `oneof`_ ``_route_type``. - router (str): - Name or id of the resource for this request. - Name should conform to RFC1035. """ - class AddressFamily(proto.Enum): - r"""(Required) limit results to this address family (either IPv4 - or IPv6) - - Values: - UNDEFINED_ADDRESS_FAMILY (0): - A value indicating that the enum field is not - set. - IPV4 (2254341): - No description available. - IPV6 (2254343): - No description available. - UNSPECIFIED_IP_VERSION (72938440): - No description available. - """ - - UNDEFINED_ADDRESS_FAMILY = 0 - IPV4 = 2254341 - IPV6 = 2254343 - UNSPECIFIED_IP_VERSION = 72938440 - - class RouteType(proto.Enum): - r"""(Required) limit results to this type of route (either - LEARNED or ADVERTISED) - - Values: - UNDEFINED_ROUTE_TYPE (0): - A value indicating that the enum field is not - set. - ADVERTISED (20302109): - No description available. - LEARNED (231892419): - No description available. - UNSPECIFIED_ROUTE_TYPE (248064440): - No description available. 
- """ - - UNDEFINED_ROUTE_TYPE = 0 - ADVERTISED = 20302109 - LEARNED = 231892419 - UNSPECIFIED_ROUTE_TYPE = 248064440 - - address_family: str = proto.Field( - proto.STRING, - number=173744655, - optional=True, - ) - destination_prefix: str = proto.Field( - proto.STRING, - number=263872483, - optional=True, - ) filter: str = proto.Field( proto.STRING, number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -71116,16 +75175,6 @@ class RouteType(proto.Enum): number=19994697, optional=True, ) - peer: str = proto.Field( - proto.STRING, - number=3436898, - optional=True, - ) - policy_applied: bool = proto.Field( - proto.BOOL, - number=379464304, - optional=True, - ) project: str = proto.Field( proto.STRING, number=227560217, @@ -71139,20 +75188,11 @@ class RouteType(proto.Enum): number=517198390, optional=True, ) - route_type: str = proto.Field( - proto.STRING, - number=375888752, - optional=True, - ) - router: str = proto.Field( - proto.STRING, - number=148608841, - ) -class ListCrossSiteNetworksRequest(proto.Message): - r"""A request message for CrossSiteNetworks.List. See the method - description for details. +class ListExternalVpnGatewaysRequest(proto.Message): + r"""A request message for ExternalVpnGateways.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -71307,8 +75347,8 @@ class ListCrossSiteNetworksRequest(proto.Message): ) -class ListDiskTypesRequest(proto.Message): - r"""A request message for DiskTypes.List. See the method +class ListFirewallPoliciesRequest(proto.Message): + r"""A request message for FirewallPolicies.List. See the method description for details. @@ -71418,8 +75458,13 @@ class ListDiskTypesRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. 
- project (str): - Project ID for this request. + parent_id (str): + Parent ID for this request. The ID can be either be + "folders/[FOLDER_ID]" if the parent is a folder or + "organizations/[ORGANIZATION_ID]" if the parent is an + organization. + + This field is a member of `oneof`_ ``_parent_id``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -71431,8 +75476,6 @@ class ListDiskTypesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -71455,24 +75498,21 @@ class ListDiskTypesRequest(proto.Message): number=19994697, optional=True, ) - project: str = proto.Field( + parent_id: str = proto.Field( proto.STRING, - number=227560217, + number=459714768, + optional=True, ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListDisksRequest(proto.Message): - r"""A request message for Disks.List. See the method description - for details. +class ListFirewallsRequest(proto.Message): + r"""A request message for Firewalls.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -71594,8 +75634,6 @@ class ListDisksRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -71627,14 +75665,10 @@ class ListDisksRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListDisksStoragePoolsRequest(proto.Message): - r"""A request message for StoragePools.ListDisks. 
See the method +class ListForwardingRulesRequest(proto.Message): + r"""A request message for ForwardingRules.List. See the method description for details. @@ -71746,6 +75780,8 @@ class ListDisksStoragePoolsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -71757,10 +75793,6 @@ class ListDisksStoragePoolsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - storage_pool (str): - Name of the storage pool to list disks of. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -71787,24 +75819,20 @@ class ListDisksStoragePoolsRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - storage_pool: str = proto.Field( - proto.STRING, - number=360473440, - ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListErrorsInstanceGroupManagersRequest(proto.Message): - r"""A request message for InstanceGroupManagers.ListErrors. See - the method description for details. +class ListFutureReservationsRequest(proto.Message): + r"""A request message for FutureReservations.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -71881,11 +75909,6 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. 
It must be a string - that meets the requirements in RFC1035, or an unsigned long - integer: must match regexp pattern: - (?:`a-z `__?)|1-9{0,19}. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -71932,9 +75955,8 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of thezone where the managed - instance group is located. - It should conform to RFC1035. + Name of the zone for this request. Name + should conform to RFC1035. """ filter: str = proto.Field( @@ -71942,10 +75964,6 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -71976,9 +75994,9 @@ class ListErrorsInstanceGroupManagersRequest(proto.Message): ) -class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): - r"""A request message for RegionInstanceGroupManagers.ListErrors. - See the method description for details. +class ListGlobalAddressesRequest(proto.Message): + r"""A request message for GlobalAddresses.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -72055,11 +76073,6 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. It must be a string - that meets the requirements in RFC1035, or an unsigned long - integer: must match regexp pattern: - (?:`a-z `__?)|1-9{0,19}. max_results (int): The maximum number of results per page that should be returned. 
If the number of available results is larger than @@ -72094,9 +76107,6 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. - This should conform to RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -72115,10 +76125,6 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -72138,10 +76144,6 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -72149,8 +76151,8 @@ class ListErrorsRegionInstanceGroupManagersRequest(proto.Message): ) -class ListExternalVpnGatewaysRequest(proto.Message): - r"""A request message for ExternalVpnGateways.List. See the +class ListGlobalForwardingRulesRequest(proto.Message): + r"""A request message for GlobalForwardingRules.List. See the method description for details. @@ -72260,170 +76262,8 @@ class ListExternalVpnGatewaysRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - For example, when partial success behavior is - enabled, aggregatedList for a single zone scope - either returns all resources in the zone or no - resources, with an error code. - - This field is a member of `oneof`_ ``_return_partial_success``. 
- """ - - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, - ) - - -class ListFirewallPoliciesRequest(proto.Message): - r"""A request message for FirewallPolicies.List. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. These two types of filter expressions cannot be - mixed in one request. - - If you want to use AIP-160, your expression must specify the - field name, an operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. The operator must be either ``=``, ``!=``, ``>``, - ``<``, ``<=``, ``>=`` or ``:``. - - For example, if you are filtering Compute Engine instances, - you can exclude instances named ``example-instance`` by - specifying ``name != example-instance``. - - The ``:*`` comparison can be used to test whether a key has - been defined. For example, to find all objects with - ``owner`` label use: - - :: - - labels.owner:* - - You can also filter nested fields. For example, you could - specify ``scheduling.automaticRestart = false`` to include - instances only if they are not scheduled for automatic - restarts. 
You can use filtering on nested fields to filter - based onresource labels. - - To filter on multiple expressions, provide each separate - expression within parentheses. For example: - - :: - - (scheduling.automaticRestart = true) - (cpuPlatform = "Intel Skylake") - - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - - :: - - (cpuPlatform = "Intel Skylake") OR - (cpuPlatform = "Intel Broadwell") AND - (scheduling.automaticRestart = true) - - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` - - The literal value is interpreted as a regular expression - using GoogleRE2 library syntax. The literal value must match - the entire field. - - For example, to filter for instances that do not end with - name "instance", you would use ``name ne .*instance``. - - You cannot combine constraints on multiple fields using - regular expressions. - - This field is a member of `oneof`_ ``_filter``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. 
- - You can also sort results in descending order based on the - creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. - - Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - parent_id (str): - Parent ID for this request. The ID can be either be - "folders/[FOLDER_ID]" if the parent is a folder or - "organizations/[ORGANIZATION_ID]" if the parent is an - organization. - - This field is a member of `oneof`_ ``_parent_id``. + project (str): + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -72457,10 +76297,9 @@ class ListFirewallPoliciesRequest(proto.Message): number=19994697, optional=True, ) - parent_id: str = proto.Field( + project: str = proto.Field( proto.STRING, - number=459714768, - optional=True, + number=227560217, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -72469,9 +76308,9 @@ class ListFirewallPoliciesRequest(proto.Message): ) -class ListFirewallsRequest(proto.Message): - r"""A request message for Firewalls.List. See the method - description for details. +class ListGlobalNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for GlobalNetworkEndpointGroups.List. See + the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -72626,8 +76465,8 @@ class ListFirewallsRequest(proto.Message): ) -class ListForwardingRulesRequest(proto.Message): - r"""A request message for ForwardingRules.List. See the method +class ListGlobalOperationsRequest(proto.Message): + r"""A request message for GlobalOperations.List. See the method description for details. @@ -72739,8 +76578,6 @@ class ListForwardingRulesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -72778,9 +76615,165 @@ class ListForwardingRulesRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListGlobalOrganizationOperationsRequest(proto.Message): + r"""A request message for GlobalOrganizationOperations.List. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. 
+ + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. 
If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + parent_id (str): + Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. 
+ """ + + filter: str = proto.Field( proto.STRING, - number=138946292, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + parent_id: str = proto.Field( + proto.STRING, + number=459714768, + optional=True, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -72789,9 +76782,9 @@ class ListForwardingRulesRequest(proto.Message): ) -class ListFutureReservationsRequest(proto.Message): - r"""A request message for FutureReservations.List. See the method - description for details. +class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for GlobalPublicDelegatedPrefixes.List. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -72913,9 +76906,6 @@ class ListFutureReservationsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - Name of the zone for this request. Name - should conform to RFC1035. """ filter: str = proto.Field( @@ -72947,14 +76937,10 @@ class ListFutureReservationsRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListGlobalAddressesRequest(proto.Message): - r"""A request message for GlobalAddresses.List. See the method +class ListHealthChecksRequest(proto.Message): + r"""A request message for HealthChecks.List. See the method description for details. @@ -73110,9 +77096,9 @@ class ListGlobalAddressesRequest(proto.Message): ) -class ListGlobalForwardingRulesRequest(proto.Message): - r"""A request message for GlobalForwardingRules.List. See the - method description for details. 
+class ListImagesRequest(proto.Message): + r"""A request message for Images.List. See the method description + for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -73267,9 +77253,10 @@ class ListGlobalForwardingRulesRequest(proto.Message): ) -class ListGlobalNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for GlobalNetworkEndpointGroups.List. See - the method description for details. +class ListInstanceGroupManagerResizeRequestsRequest(proto.Message): + r"""A request message for + InstanceGroupManagerResizeRequests.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -73346,6 +77333,9 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. The + name should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -73391,6 +77381,10 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of thezone where the managed + instance group is located. The name should + conform to RFC1035. 
""" filter: str = proto.Field( @@ -73398,6 +77392,10 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -73422,11 +77420,15 @@ class ListGlobalNetworkEndpointGroupsRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListGlobalOperationsRequest(proto.Message): - r"""A request message for GlobalOperations.List. See the method - description for details. +class ListInstanceGroupManagersRequest(proto.Message): + r"""A request message for InstanceGroupManagers.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -73548,6 +77550,9 @@ class ListGlobalOperationsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of thezone where the managed + instance group is located. """ filter: str = proto.Field( @@ -73579,11 +77584,15 @@ class ListGlobalOperationsRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListGlobalOrganizationOperationsRequest(proto.Message): - r"""A request message for GlobalOrganizationOperations.List. See - the method description for details. +class ListInstanceGroupsRequest(proto.Message): + r"""A request message for InstanceGroups.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -73692,10 +77701,8 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - parent_id (str): - Parent ID for this request. 
- - This field is a member of `oneof`_ ``_parent_id``. + project (str): + Project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -73707,6 +77714,9 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of thezone + where the instance group is located. """ filter: str = proto.Field( @@ -73729,21 +77739,24 @@ class ListGlobalOrganizationOperationsRequest(proto.Message): number=19994697, optional=True, ) - parent_id: str = proto.Field( + project: str = proto.Field( proto.STRING, - number=459714768, - optional=True, + number=227560217, ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): - r"""A request message for GlobalPublicDelegatedPrefixes.List. See - the method description for details. +class ListInstanceTemplatesRequest(proto.Message): + r"""A request message for InstanceTemplates.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -73898,9 +77911,9 @@ class ListGlobalPublicDelegatedPrefixesRequest(proto.Message): ) -class ListHealthChecksRequest(proto.Message): - r"""A request message for HealthChecks.List. See the method - description for details. +class ListInstancesInstanceGroupsRequest(proto.Message): + r"""A request message for InstanceGroups.ListInstances. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -73977,6 +77990,11 @@ class ListHealthChecksRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. 
+ instance_group (str): + The name of the instance group from which you + want to generate a list of included instances. + instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest): + The body resource for this request max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -74022,6 +78040,9 @@ class ListHealthChecksRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone + where the instance group is located. """ filter: str = proto.Field( @@ -74029,6 +78050,15 @@ class ListHealthChecksRequest(proto.Message): number=336120696, optional=True, ) + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) + instance_groups_list_instances_request_resource: "InstanceGroupsListInstancesRequest" = proto.Field( + proto.MESSAGE, + number=476255263, + message="InstanceGroupsListInstancesRequest", + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -74053,11 +78083,15 @@ class ListHealthChecksRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListImagesRequest(proto.Message): - r"""A request message for Images.List. See the method description - for details. +class ListInstancesRegionInstanceGroupsRequest(proto.Message): + r"""A request message for RegionInstanceGroups.ListInstances. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -74134,6 +78168,9 @@ class ListImagesRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group (str): + Name of the regional instance group for which + we want to list the instances. 
max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -74168,6 +78205,10 @@ class ListImagesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. + region_instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest): + The body resource for this request return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -74186,6 +78227,10 @@ class ListImagesRequest(proto.Message): number=336120696, optional=True, ) + instance_group: str = proto.Field( + proto.STRING, + number=81095253, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -74205,6 +78250,15 @@ class ListImagesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_instance_groups_list_instances_request_resource: "RegionInstanceGroupsListInstancesRequest" = proto.Field( + proto.MESSAGE, + number=48239828, + message="RegionInstanceGroupsListInstancesRequest", + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -74212,9 +78266,8 @@ class ListImagesRequest(proto.Message): ) -class ListInstanceGroupManagerResizeRequestsRequest(proto.Message): - r"""A request message for - InstanceGroupManagerResizeRequests.List. See the method +class ListInstancesRequest(proto.Message): + r"""A request message for Instances.List. See the method description for details. @@ -74292,9 +78345,6 @@ class ListInstanceGroupManagerResizeRequestsRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. The - name should conform to RFC1035. 
max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -74341,9 +78391,7 @@ class ListInstanceGroupManagerResizeRequestsRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of thezone where the managed - instance group is located. The name should - conform to RFC1035. + The name of the zone for this request. """ filter: str = proto.Field( @@ -74351,10 +78399,6 @@ class ListInstanceGroupManagerResizeRequestsRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -74385,8 +78429,97 @@ class ListInstanceGroupManagerResizeRequestsRequest(proto.Message): ) -class ListInstanceGroupManagersRequest(proto.Message): - r"""A request message for InstanceGroupManagers.List. See the +class ListInstantSnapshotGroups(proto.Message): + r"""Contains a list of InstantSnapshotGroup resources. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + [Output Only] Unique identifier for the resource; defined by + the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstantSnapshotGroup]): + A list of InstantSnapshotGroup resources. + kind (str): + Output only. Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + [Output Only] This token allows you to get the next page of + results for list requests. If the number of results is + larger thanmaxResults, use the nextPageToken as a value for + the query parameter pageToken in the next list request. + Subsequent list requests will have their own nextPageToken + to continue paging through the results. 
+ + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + Output only. [Output Only] Server-defined URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + Output only. [Output Only] Unreachable resources. + end_interface: MixerListResponseWithEtagBuilder + warning (google.cloud.compute_v1.types.Warning): + [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["InstantSnapshotGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="InstantSnapshotGroup", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + +class ListInstantSnapshotGroupsRequest(proto.Message): + r"""A request message for InstantSnapshotGroups.List. See the method description for details. @@ -74510,8 +78643,7 @@ class ListInstanceGroupManagersRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of thezone where the managed - instance group is located. + The name of the zone for this request. """ filter: str = proto.Field( @@ -74549,8 +78681,8 @@ class ListInstanceGroupManagersRequest(proto.Message): ) -class ListInstanceGroupsRequest(proto.Message): - r"""A request message for InstanceGroups.List. 
See the method +class ListInstantSnapshotsRequest(proto.Message): + r"""A request message for InstantSnapshots.List. See the method description for details. @@ -74674,8 +78806,7 @@ class ListInstanceGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of thezone - where the instance group is located. + The name of the zone for this request. """ filter: str = proto.Field( @@ -74713,9 +78844,9 @@ class ListInstanceGroupsRequest(proto.Message): ) -class ListInstanceTemplatesRequest(proto.Message): - r"""A request message for InstanceTemplates.List. See the method - description for details. +class ListInterconnectAttachmentGroupsRequest(proto.Message): + r"""A request message for InterconnectAttachmentGroups.List. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -74870,8 +79001,8 @@ class ListInstanceTemplatesRequest(proto.Message): ) -class ListInstancesInstanceGroupsRequest(proto.Message): - r"""A request message for InstanceGroups.ListInstances. See the +class ListInterconnectAttachmentsRequest(proto.Message): + r"""A request message for InterconnectAttachments.List. See the method description for details. @@ -74949,11 +79080,6 @@ class ListInstancesInstanceGroupsRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group (str): - The name of the instance group from which you - want to generate a list of included instances. - instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.InstanceGroupsListInstancesRequest): - The body resource for this request max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -74988,6 +79114,8 @@ class ListInstancesInstanceGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. 
project (str): Project ID for this request. + region (str): + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -74999,9 +79127,6 @@ class ListInstancesInstanceGroupsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone - where the instance group is located. """ filter: str = proto.Field( @@ -75009,15 +79134,6 @@ class ListInstancesInstanceGroupsRequest(proto.Message): number=336120696, optional=True, ) - instance_group: str = proto.Field( - proto.STRING, - number=81095253, - ) - instance_groups_list_instances_request_resource: "InstanceGroupsListInstancesRequest" = proto.Field( - proto.MESSAGE, - number=476255263, - message="InstanceGroupsListInstancesRequest", - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -75037,20 +79153,20 @@ class ListInstancesInstanceGroupsRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListInstancesRegionInstanceGroupsRequest(proto.Message): - r"""A request message for RegionInstanceGroups.ListInstances. See - the method description for details. +class ListInterconnectGroupsRequest(proto.Message): + r"""A request message for InterconnectGroups.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -75127,9 +79243,6 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group (str): - Name of the regional instance group for which - we want to list the instances. 
max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -75164,10 +79277,6 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. - region_instance_groups_list_instances_request_resource (google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest): - The body resource for this request return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -75186,10 +79295,6 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): number=336120696, optional=True, ) - instance_group: str = proto.Field( - proto.STRING, - number=81095253, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -75209,15 +79314,6 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - region_instance_groups_list_instances_request_resource: "RegionInstanceGroupsListInstancesRequest" = proto.Field( - proto.MESSAGE, - number=48239828, - message="RegionInstanceGroupsListInstancesRequest", - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -75225,9 +79321,9 @@ class ListInstancesRegionInstanceGroupsRequest(proto.Message): ) -class ListInstancesRequest(proto.Message): - r"""A request message for Instances.List. See the method - description for details. +class ListInterconnectLocationsRequest(proto.Message): + r"""A request message for InterconnectLocations.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -75349,8 +79445,6 @@ class ListInstancesRequest(proto.Message): resources, with an error code. 
This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -75382,15 +79476,11 @@ class ListInstancesRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListInstantSnapshotsRequest(proto.Message): - r"""A request message for InstantSnapshots.List. See the method - description for details. +class ListInterconnectRemoteLocationsRequest(proto.Message): + r"""A request message for InterconnectRemoteLocations.List. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -75512,8 +79602,6 @@ class ListInstantSnapshotsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -75545,15 +79633,11 @@ class ListInstantSnapshotsRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListInterconnectAttachmentGroupsRequest(proto.Message): - r"""A request message for InterconnectAttachmentGroups.List. See - the method description for details. +class ListInterconnectsRequest(proto.Message): + r"""A request message for Interconnects.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -75708,9 +79792,9 @@ class ListInterconnectAttachmentGroupsRequest(proto.Message): ) -class ListInterconnectAttachmentsRequest(proto.Message): - r"""A request message for InterconnectAttachments.List. See the - method description for details. +class ListLicensesRequest(proto.Message): + r"""A request message for Licenses.List. See the method + description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -75821,8 +79905,6 @@ class ListInterconnectAttachmentsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -75860,10 +79942,6 @@ class ListInterconnectAttachmentsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -75871,8 +79949,8 @@ class ListInterconnectAttachmentsRequest(proto.Message): ) -class ListInterconnectGroupsRequest(proto.Message): - r"""A request message for InterconnectGroups.List. See the method +class ListMachineImagesRequest(proto.Message): + r"""A request message for MachineImages.List. See the method description for details. @@ -76028,9 +80106,9 @@ class ListInterconnectGroupsRequest(proto.Message): ) -class ListInterconnectLocationsRequest(proto.Message): - r"""A request message for InterconnectLocations.List. See the - method description for details. +class ListMachineTypesRequest(proto.Message): + r"""A request message for MachineTypes.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -76152,6 +80230,8 @@ class ListInterconnectLocationsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. 
""" filter: str = proto.Field( @@ -76183,167 +80263,15 @@ class ListInterconnectLocationsRequest(proto.Message): number=517198390, optional=True, ) - - -class ListInterconnectRemoteLocationsRequest(proto.Message): - r"""A request message for InterconnectRemoteLocations.List. See - the method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. These two types of filter expressions cannot be - mixed in one request. - - If you want to use AIP-160, your expression must specify the - field name, an operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. The operator must be either ``=``, ``!=``, ``>``, - ``<``, ``<=``, ``>=`` or ``:``. - - For example, if you are filtering Compute Engine instances, - you can exclude instances named ``example-instance`` by - specifying ``name != example-instance``. - - The ``:*`` comparison can be used to test whether a key has - been defined. For example, to find all objects with - ``owner`` label use: - - :: - - labels.owner:* - - You can also filter nested fields. For example, you could - specify ``scheduling.automaticRestart = false`` to include - instances only if they are not scheduled for automatic - restarts. You can use filtering on nested fields to filter - based onresource labels. - - To filter on multiple expressions, provide each separate - expression within parentheses. For example: - - :: - - (scheduling.automaticRestart = true) - (cpuPlatform = "Intel Skylake") - - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. 
For example: - - :: - - (cpuPlatform = "Intel Skylake") OR - (cpuPlatform = "Intel Broadwell") AND - (scheduling.automaticRestart = true) - - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` - - The literal value is interpreted as a regular expression - using GoogleRE2 library syntax. The literal value must match - the entire field. - - For example, to filter for instances that do not end with - name "instance", you would use ``name ne .*instance``. - - You cannot combine constraints on multiple fields using - regular expressions. - - This field is a member of `oneof`_ ``_filter``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. - - You can also sort results in descending order based on the - creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. - - Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. 
- page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - For example, when partial success behavior is - enabled, aggregatedList for a single zone scope - either returns all resources in the zone or no - resources, with an error code. - - This field is a member of `oneof`_ ``_return_partial_success``. - """ - - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( + zone: str = proto.Field( proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, + number=3744684, ) -class ListInterconnectsRequest(proto.Message): - r"""A request message for Interconnects.List. See the method +class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.ListManagedInstances. See the method description for details. @@ -76421,6 +80349,8 @@ class ListInterconnectsRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -76466,6 +80396,9 @@ class ListInterconnectsRequest(proto.Message): resources, with an error code. 
This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of thezone where the managed + instance group is located. """ filter: str = proto.Field( @@ -76473,6 +80406,10 @@ class ListInterconnectsRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -76497,10 +80434,15 @@ class ListInterconnectsRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListLicensesRequest(proto.Message): - r"""A request message for Licenses.List. See the method +class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.ListManagedInstances. See the method description for details. @@ -76578,6 +80520,8 @@ class ListLicensesRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -76612,6 +80556,8 @@ class ListLicensesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -76630,6 +80576,10 @@ class ListLicensesRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -76649,6 +80599,10 @@ class ListLicensesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -76656,8 +80610,8 @@ class ListLicensesRequest(proto.Message): ) -class ListMachineImagesRequest(proto.Message): - r"""A request message for MachineImages.List. See the method +class ListNetworkAttachmentsRequest(proto.Message): + r"""A request message for NetworkAttachments.List. See the method description for details. @@ -76769,6 +80723,8 @@ class ListMachineImagesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region of this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -76806,6 +80762,10 @@ class ListMachineImagesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -76813,9 +80773,9 @@ class ListMachineImagesRequest(proto.Message): ) -class ListMachineTypesRequest(proto.Message): - r"""A request message for MachineTypes.List. See the method - description for details. +class ListNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for NetworkEndpointGroups.List. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -76938,7 +80898,9 @@ class ListMachineTypesRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of the zone for this request. + The name of thezone + where the network endpoint group is located. It + should comply with RFC1035. """ filter: str = proto.Field( @@ -76976,9 +80938,9 @@ class ListMachineTypesRequest(proto.Message): ) -class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): +class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): r"""A request message for - InstanceGroupManagers.ListManagedInstances. See the method + GlobalNetworkEndpointGroups.ListNetworkEndpoints. See the method description for details. @@ -77056,8 +81018,6 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -77067,6 +81027,11 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. + network_endpoint_group (str): + The name of the network endpoint group from + which you want to generate a list of included + network endpoints. It should comply with + RFC1035. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -77103,9 +81068,6 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of thezone where the managed - instance group is located. 
""" filter: str = proto.Field( @@ -77113,15 +81075,15 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, optional=True, ) + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -77141,15 +81103,11 @@ class ListManagedInstancesInstanceGroupManagersRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): +class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): r"""A request message for - RegionInstanceGroupManagers.ListManagedInstances. See the method + NetworkEndpointGroups.ListNetworkEndpoints. See the method description for details. @@ -77227,8 +81185,6 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -77238,6 +81194,13 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. + network_endpoint_group (str): + The name of the network endpoint group from + which you want to generate a list of included + network endpoints. It should comply with + RFC1035. + network_endpoint_groups_list_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsListEndpointsRequest): + The body resource for this request order_by (str): Sorts list results by a certain order. 
By default, results are returned in alphanumerical order based on the resource @@ -77263,8 +81226,6 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -77276,6 +81237,10 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone where + the network endpoint group is located. It should + comply with RFC1035. """ filter: str = proto.Field( @@ -77283,15 +81248,20 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, optional=True, ) + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) + network_endpoint_groups_list_endpoints_request_resource: "NetworkEndpointGroupsListEndpointsRequest" = proto.Field( + proto.MESSAGE, + number=59493390, + message="NetworkEndpointGroupsListEndpointsRequest", + ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -77306,19 +81276,20 @@ class ListManagedInstancesRegionInstanceGroupManagersRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListNetworkAttachmentsRequest(proto.Message): - r"""A request message for NetworkAttachments.List. 
See the method +class ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for + RegionNetworkEndpointGroups.ListNetworkEndpoints. See the method description for details. @@ -77405,6 +81376,11 @@ class ListNetworkAttachmentsRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. + network_endpoint_group (str): + The name of the network endpoint group from + which you want to generate a list of included + network endpoints. It should comply with + RFC1035. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -77431,7 +81407,9 @@ class ListNetworkAttachmentsRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region of this request. + The name of theregion + where the network endpoint group is located. It + should comply with RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -77455,6 +81433,10 @@ class ListNetworkAttachmentsRequest(proto.Message): number=54715419, optional=True, ) + network_endpoint_group: str = proto.Field( + proto.STRING, + number=433907078, + ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -77480,8 +81462,8 @@ class ListNetworkAttachmentsRequest(proto.Message): ) -class ListNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for NetworkEndpointGroups.List. See the +class ListNetworkFirewallPoliciesRequest(proto.Message): + r"""A request message for NetworkFirewallPolicies.List. See the method description for details. @@ -77603,177 +81585,6 @@ class ListNetworkEndpointGroupsRequest(proto.Message): either returns all resources in the zone or no resources, with an error code. - This field is a member of `oneof`_ ``_return_partial_success``. 
- zone (str): - The name of thezone - where the network endpoint group is located. It - should comply with RFC1035. - """ - - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, - ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) - - -class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for - GlobalNetworkEndpointGroups.ListNetworkEndpoints. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. These two types of filter expressions cannot be - mixed in one request. - - If you want to use AIP-160, your expression must specify the - field name, an operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. The operator must be either ``=``, ``!=``, ``>``, - ``<``, ``<=``, ``>=`` or ``:``. - - For example, if you are filtering Compute Engine instances, - you can exclude instances named ``example-instance`` by - specifying ``name != example-instance``. - - The ``:*`` comparison can be used to test whether a key has - been defined. 
For example, to find all objects with - ``owner`` label use: - - :: - - labels.owner:* - - You can also filter nested fields. For example, you could - specify ``scheduling.automaticRestart = false`` to include - instances only if they are not scheduled for automatic - restarts. You can use filtering on nested fields to filter - based onresource labels. - - To filter on multiple expressions, provide each separate - expression within parentheses. For example: - - :: - - (scheduling.automaticRestart = true) - (cpuPlatform = "Intel Skylake") - - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - - :: - - (cpuPlatform = "Intel Skylake") OR - (cpuPlatform = "Intel Broadwell") AND - (scheduling.automaticRestart = true) - - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` - - The literal value is interpreted as a regular expression - using GoogleRE2 library syntax. The literal value must match - the entire field. - - For example, to filter for instances that do not end with - name "instance", you would use ``name ne .*instance``. - - You cannot combine constraints on multiple fields using - regular expressions. - - This field is a member of `oneof`_ ``_filter``. - max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. 
(Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - network_endpoint_group (str): - The name of the network endpoint group from - which you want to generate a list of included - network endpoints. It should comply with - RFC1035. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. - - You can also sort results in descending order based on the - creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. - - Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - For example, when partial success behavior is - enabled, aggregatedList for a single zone scope - either returns all resources in the zone or no - resources, with an error code. - This field is a member of `oneof`_ ``_return_partial_success``. 
""" @@ -77787,10 +81598,6 @@ class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): number=54715419, optional=True, ) - network_endpoint_group: str = proto.Field( - proto.STRING, - number=433907078, - ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -77812,9 +81619,8 @@ class ListNetworkEndpointsGlobalNetworkEndpointGroupsRequest(proto.Message): ) -class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for - NetworkEndpointGroups.ListNetworkEndpoints. See the method +class ListNetworkProfilesRequest(proto.Message): + r"""A request message for NetworkProfiles.List. See the method description for details. @@ -77901,13 +81707,6 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. - network_endpoint_group (str): - The name of the network endpoint group from - which you want to generate a list of included - network endpoints. It should comply with - RFC1035. - network_endpoint_groups_list_endpoints_request_resource (google.cloud.compute_v1.types.NetworkEndpointGroupsListEndpointsRequest): - The body resource for this request order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -77944,10 +81743,6 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone where - the network endpoint group is located. It should - comply with RFC1035. 
""" filter: str = proto.Field( @@ -77960,15 +81755,6 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): number=54715419, optional=True, ) - network_endpoint_group: str = proto.Field( - proto.STRING, - number=433907078, - ) - network_endpoint_groups_list_endpoints_request_resource: "NetworkEndpointGroupsListEndpointsRequest" = proto.Field( - proto.MESSAGE, - number=59493390, - message="NetworkEndpointGroupsListEndpointsRequest", - ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -77988,15 +81774,10 @@ class ListNetworkEndpointsNetworkEndpointGroupsRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for - RegionNetworkEndpointGroups.ListNetworkEndpoints. See the method +class ListNetworksRequest(proto.Message): + r"""A request message for Networks.List. See the method description for details. @@ -78083,11 +81864,6 @@ class ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. - network_endpoint_group (str): - The name of the network endpoint group from - which you want to generate a list of included - network endpoints. It should comply with - RFC1035. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -78113,10 +81889,6 @@ class ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - The name of theregion - where the network endpoint group is located. It - should comply with RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -78140,10 +81912,6 @@ class ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(proto.Message): number=54715419, optional=True, ) - network_endpoint_group: str = proto.Field( - proto.STRING, - number=433907078, - ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -78158,10 +81926,6 @@ class ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -78169,9 +81933,9 @@ class ListNetworkEndpointsRegionNetworkEndpointGroupsRequest(proto.Message): ) -class ListNetworkFirewallPoliciesRequest(proto.Message): - r"""A request message for NetworkFirewallPolicies.List. See the - method description for details. +class ListNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -78293,6 +82057,8 @@ class ListNetworkFirewallPoliciesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. """ filter: str = proto.Field( @@ -78324,10 +82090,14 @@ class ListNetworkFirewallPoliciesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListNetworkProfilesRequest(proto.Message): - r"""A request message for NetworkProfiles.List. See the method +class ListNodeTemplatesRequest(proto.Message): + r"""A request message for NodeTemplates.List. See the method description for details. @@ -78439,6 +82209,8 @@ class ListNetworkProfilesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + The name of the region for this request. 
return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -78476,6 +82248,10 @@ class ListNetworkProfilesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -78483,8 +82259,8 @@ class ListNetworkProfilesRequest(proto.Message): ) -class ListNetworksRequest(proto.Message): - r"""A request message for Networks.List. See the method +class ListNodeTypesRequest(proto.Message): + r"""A request message for NodeTypes.List. See the method description for details. @@ -78607,6 +82383,8 @@ class ListNetworksRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. """ filter: str = proto.Field( @@ -78638,10 +82416,14 @@ class ListNetworksRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListNodeGroupsRequest(proto.Message): - r"""A request message for NodeGroups.List. See the method +class ListNodesNodeGroupsRequest(proto.Message): + r"""A request message for NodeGroups.ListNodes. See the method description for details. @@ -78728,6 +82510,9 @@ class ListNodeGroupsRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. + node_group (str): + Name of the NodeGroup resource whose nodes + you want to list. order_by (str): Sorts list results by a certain order. 
By default, results are returned in alphanumerical order based on the resource @@ -78778,6 +82563,10 @@ class ListNodeGroupsRequest(proto.Message): number=54715419, optional=True, ) + node_group: str = proto.Field( + proto.STRING, + number=469958146, + ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -78803,8 +82592,168 @@ class ListNodeGroupsRequest(proto.Message): ) -class ListNodeTemplatesRequest(proto.Message): - r"""A request message for NodeTemplates.List. See the method +class ListOrganizationSecurityPoliciesRequest(proto.Message): + r"""A request message for OrganizationSecurityPolicies.List. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. 
You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
+ + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + parent_id (str): + Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + parent_id: str = proto.Field( + proto.STRING, + number=459714768, + optional=True, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListPacketMirroringsRequest(proto.Message): + r"""A request message for PacketMirrorings.List. See the method description for details. 
@@ -78917,7 +82866,7 @@ class ListNodeTemplatesRequest(proto.Message): project (str): Project ID for this request. region (str): - The name of the region for this request. + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -78966,14 +82915,20 @@ class ListNodeTemplatesRequest(proto.Message): ) -class ListNodeTypesRequest(proto.Message): - r"""A request message for NodeTypes.List. See the method - description for details. +class ListPeeringRoutesNetworksRequest(proto.Message): + r"""A request message for Networks.ListPeeringRoutes. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + direction (str): + The direction of the exchanged routes. + Check the Direction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_direction``. filter (str): A filter expression that filters resources listed in the response. Most Compute resources support two types of filter @@ -79054,6 +83009,8 @@ class ListNodeTypesRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. + network (str): + Name of the network for this request. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -79077,8 +83034,19 @@ class ListNodeTypesRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. + peering_name (str): + The response will show routes exchanged over + the given peering connection. + + This field is a member of `oneof`_ ``_peering_name``. project (str): Project ID for this request. + region (str): + The region of the request. The response will + include all subnet routes, static routes and + dynamic routes in the region. 
+ + This field is a member of `oneof`_ ``_region``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -79090,10 +83058,30 @@ class ListNodeTypesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ + class Direction(proto.Enum): + r"""The direction of the exchanged routes. + + Values: + UNDEFINED_DIRECTION (0): + A value indicating that the enum field is not + set. + INCOMING (338552870): + For routes exported from peer network. + OUTGOING (307438444): + For routes exported from local network. + """ + + UNDEFINED_DIRECTION = 0 + INCOMING = 338552870 + OUTGOING = 307438444 + + direction: str = proto.Field( + proto.STRING, + number=111150975, + optional=True, + ) filter: str = proto.Field( proto.STRING, number=336120696, @@ -79104,6 +83092,10 @@ class ListNodeTypesRequest(proto.Message): number=54715419, optional=True, ) + network: str = proto.Field( + proto.STRING, + number=232872494, + ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -79114,23 +83106,30 @@ class ListNodeTypesRequest(proto.Message): number=19994697, optional=True, ) + peering_name: str = proto.Field( + proto.STRING, + number=249571370, + optional=True, + ) project: str = proto.Field( proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListNodesNodeGroupsRequest(proto.Message): - r"""A request message for NodeGroups.ListNodes. See the method +class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): + r"""A request message for + InstanceGroupManagers.ListPerInstanceConfigs. See the method description for details. 
@@ -79208,6 +83207,9 @@ class ListNodesNodeGroupsRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -79217,9 +83219,6 @@ class ListNodesNodeGroupsRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. - node_group (str): - Name of the NodeGroup resource whose nodes - you want to list. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -79257,7 +83256,9 @@ class ListNodesNodeGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - The name of the zone for this request. + The name of thezone + where the managed instance group is located. + It should conform to RFC1035. """ filter: str = proto.Field( @@ -79265,15 +83266,15 @@ class ListNodesNodeGroupsRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, optional=True, ) - node_group: str = proto.Field( - proto.STRING, - number=469958146, - ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -79299,9 +83300,10 @@ class ListNodesNodeGroupsRequest(proto.Message): ) -class ListOrganizationSecurityPoliciesRequest(proto.Message): - r"""A request message for OrganizationSecurityPolicies.List. See - the method description for details. +class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagers.ListPerInstanceConfigs. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -79378,6 +83380,9 @@ class ListOrganizationSecurityPoliciesRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. It + should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -79410,10 +83415,11 @@ class ListOrganizationSecurityPoliciesRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - parent_id (str): - Parent ID for this request. - - This field is a member of `oneof`_ ``_parent_id``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request, + should conform to RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -79432,6 +83438,10 @@ class ListOrganizationSecurityPoliciesRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -79447,10 +83457,13 @@ class ListOrganizationSecurityPoliciesRequest(proto.Message): number=19994697, optional=True, ) - parent_id: str = proto.Field( + project: str = proto.Field( proto.STRING, - number=459714768, - optional=True, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -79459,9 +83472,10 @@ class ListOrganizationSecurityPoliciesRequest(proto.Message): ) -class ListPacketMirroringsRequest(proto.Message): - r"""A request message for PacketMirrorings.List. See the method - description for details. 
+class ListPreconfiguredExpressionSetsOrganizationSecurityPoliciesRequest(proto.Message): + r"""A request message for + OrganizationSecurityPolicies.ListPreconfiguredExpressionSets. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -79570,10 +83584,10 @@ class ListPacketMirroringsRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - region (str): - Name of the region for this request. + parent_id (str): + Parent ID for this request. + + This field is a member of `oneof`_ ``_parent_id``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -79607,13 +83621,10 @@ class ListPacketMirroringsRequest(proto.Message): number=19994697, optional=True, ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( + parent_id: str = proto.Field( proto.STRING, - number=138946292, + number=459714768, + optional=True, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -79622,20 +83633,15 @@ class ListPacketMirroringsRequest(proto.Message): ) -class ListPeeringRoutesNetworksRequest(proto.Message): - r"""A request message for Networks.ListPeeringRoutes. See the - method description for details. +class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): + r"""A request message for + SecurityPolicies.ListPreconfiguredExpressionSets. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - direction (str): - The direction of the exchanged routes. - Check the Direction enum for the list of - possible values. - - This field is a member of `oneof`_ ``_direction``. filter (str): A filter expression that filters resources listed in the response. 
Most Compute resources support two types of filter @@ -79716,8 +83722,6 @@ class ListPeeringRoutesNetworksRequest(proto.Message): ``500``, inclusive. (Default: ``500``) This field is a member of `oneof`_ ``_max_results``. - network (str): - Name of the network for this request. order_by (str): Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource @@ -79741,19 +83745,8 @@ class ListPeeringRoutesNetworksRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - peering_name (str): - The response will show routes exchanged over - the given peering connection. - - This field is a member of `oneof`_ ``_peering_name``. project (str): Project ID for this request. - region (str): - The region of the request. The response will - include all subnet routes, static routes and - dynamic routes in the region. - - This field is a member of `oneof`_ ``_region``. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -79767,28 +83760,6 @@ class ListPeeringRoutesNetworksRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. """ - class Direction(proto.Enum): - r"""The direction of the exchanged routes. - - Values: - UNDEFINED_DIRECTION (0): - A value indicating that the enum field is not - set. - INCOMING (338552870): - For routes exported from peer network. - OUTGOING (307438444): - For routes exported from local network. 
- """ - - UNDEFINED_DIRECTION = 0 - INCOMING = 338552870 - OUTGOING = 307438444 - - direction: str = proto.Field( - proto.STRING, - number=111150975, - optional=True, - ) filter: str = proto.Field( proto.STRING, number=336120696, @@ -79799,10 +83770,6 @@ class Direction(proto.Enum): number=54715419, optional=True, ) - network: str = proto.Field( - proto.STRING, - number=232872494, - ) order_by: str = proto.Field( proto.STRING, number=160562920, @@ -79813,20 +83780,10 @@ class Direction(proto.Enum): number=19994697, optional=True, ) - peering_name: str = proto.Field( - proto.STRING, - number=249571370, - optional=True, - ) project: str = proto.Field( proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - optional=True, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -79834,9 +83791,8 @@ class Direction(proto.Enum): ) -class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): - r"""A request message for - InstanceGroupManagers.ListPerInstanceConfigs. See the method +class ListPreviewFeaturesRequest(proto.Message): + r"""A request message for PreviewFeatures.List. See the method description for details. @@ -79914,9 +83870,6 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. It - should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -79962,10 +83915,6 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of thezone - where the managed instance group is located. - It should conform to RFC1035. 
""" filter: str = proto.Field( @@ -79973,10 +83922,6 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -80001,15 +83946,10 @@ class ListPerInstanceConfigsInstanceGroupManagersRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): - r"""A request message for - RegionInstanceGroupManagers.ListPerInstanceConfigs. See the +class ListPublicAdvertisedPrefixesRequest(proto.Message): + r"""A request message for PublicAdvertisedPrefixes.List. See the method description for details. @@ -80087,9 +84027,6 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance_group_manager (str): - The name of the managed instance group. It - should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -80124,9 +84061,6 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request, - should conform to RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -80145,10 +84079,6 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): number=336120696, optional=True, ) - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -80168,10 +84098,6 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -80179,10 +84105,9 @@ class ListPerInstanceConfigsRegionInstanceGroupManagersRequest(proto.Message): ) -class ListPreconfiguredExpressionSetsOrganizationSecurityPoliciesRequest(proto.Message): - r"""A request message for - OrganizationSecurityPolicies.ListPreconfiguredExpressionSets. - See the method description for details. +class ListPublicDelegatedPrefixesRequest(proto.Message): + r"""A request message for PublicDelegatedPrefixes.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -80291,10 +84216,10 @@ class ListPreconfiguredExpressionSetsOrganizationSecurityPoliciesRequest(proto.M the next page of results. This field is a member of `oneof`_ ``_page_token``. - parent_id (str): - Parent ID for this request. - - This field is a member of `oneof`_ ``_parent_id``. + project (str): + Project ID for this request. + region (str): + Name of the region of this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -80328,10 +84253,13 @@ class ListPreconfiguredExpressionSetsOrganizationSecurityPoliciesRequest(proto.M number=19994697, optional=True, ) - parent_id: str = proto.Field( + project: str = proto.Field( proto.STRING, - number=459714768, - optional=True, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, ) return_partial_success: bool = proto.Field( proto.BOOL, @@ -80340,9 +84268,8 @@ class ListPreconfiguredExpressionSetsOrganizationSecurityPoliciesRequest(proto.M ) -class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): - r"""A request message for - SecurityPolicies.ListPreconfiguredExpressionSets. See the method +class ListReferrersInstancesRequest(proto.Message): + r"""A request message for Instances.ListReferrers. See the method description for details. @@ -80420,6 +84347,10 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance (str): + Name of the target instance scoping this + request, or '-' if the request should span over + all instances in the container. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -80465,6 +84396,8 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. 
""" filter: str = proto.Field( @@ -80472,6 +84405,10 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): number=336120696, optional=True, ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -80496,10 +84433,14 @@ class ListPreconfiguredExpressionSetsSecurityPoliciesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListPreviewFeaturesRequest(proto.Message): - r"""A request message for PreviewFeatures.List. See the method +class ListRegionAutoscalersRequest(proto.Message): + r"""A request message for RegionAutoscalers.List. See the method description for details. @@ -80611,6 +84552,8 @@ class ListPreviewFeaturesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -80648,6 +84591,10 @@ class ListPreviewFeaturesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -80655,8 +84602,8 @@ class ListPreviewFeaturesRequest(proto.Message): ) -class ListPublicAdvertisedPrefixesRequest(proto.Message): - r"""A request message for PublicAdvertisedPrefixes.List. See the +class ListRegionBackendBucketsRequest(proto.Message): + r"""A request message for RegionBackendBuckets.List. See the method description for details. @@ -80768,6 +84715,8 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region of this request. 
return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -80805,6 +84754,10 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -80812,8 +84765,8 @@ class ListPublicAdvertisedPrefixesRequest(proto.Message): ) -class ListPublicDelegatedPrefixesRequest(proto.Message): - r"""A request message for PublicDelegatedPrefixes.List. See the +class ListRegionBackendServicesRequest(proto.Message): + r"""A request message for RegionBackendServices.List. See the method description for details. @@ -80926,7 +84879,7 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region of this request. + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -80975,8 +84928,8 @@ class ListPublicDelegatedPrefixesRequest(proto.Message): ) -class ListReferrersInstancesRequest(proto.Message): - r"""A request message for Instances.ListReferrers. See the method +class ListRegionCommitmentsRequest(proto.Message): + r"""A request message for RegionCommitments.List. See the method description for details. @@ -81054,10 +85007,6 @@ class ListReferrersInstancesRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. - instance (str): - Name of the target instance scoping this - request, or '-' if the request should span over - all instances in the container. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -81092,6 +85041,8 @@ class ListReferrersInstancesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. 
project (str): Project ID for this request. + region (str): + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -81103,8 +85054,6 @@ class ListReferrersInstancesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -81112,10 +85061,6 @@ class ListReferrersInstancesRequest(proto.Message): number=336120696, optional=True, ) - instance: str = proto.Field( - proto.STRING, - number=18257045, - ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -81135,20 +85080,20 @@ class ListReferrersInstancesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListRegionAutoscalersRequest(proto.Message): - r"""A request message for RegionAutoscalers.List. See the method - description for details. +class ListRegionCompositeHealthChecksRequest(proto.Message): + r"""A request message for RegionCompositeHealthChecks.List. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -81309,9 +85254,9 @@ class ListRegionAutoscalersRequest(proto.Message): ) -class ListRegionBackendServicesRequest(proto.Message): - r"""A request message for RegionBackendServices.List. See the - method description for details. +class ListRegionDiskTypesRequest(proto.Message): + r"""A request message for RegionDiskTypes.List. See the method + description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -81423,7 +85368,7 @@ class ListRegionBackendServicesRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + The name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -81472,8 +85417,8 @@ class ListRegionBackendServicesRequest(proto.Message): ) -class ListRegionCommitmentsRequest(proto.Message): - r"""A request message for RegionCommitments.List. See the method +class ListRegionDisksRequest(proto.Message): + r"""A request message for RegionDisks.List. See the method description for details. @@ -81635,9 +85580,9 @@ class ListRegionCommitmentsRequest(proto.Message): ) -class ListRegionCompositeHealthChecksRequest(proto.Message): - r"""A request message for RegionCompositeHealthChecks.List. See - the method description for details. +class ListRegionHealthAggregationPoliciesRequest(proto.Message): + r"""A request message for RegionHealthAggregationPolicies.List. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -81798,9 +85743,9 @@ class ListRegionCompositeHealthChecksRequest(proto.Message): ) -class ListRegionDiskTypesRequest(proto.Message): - r"""A request message for RegionDiskTypes.List. See the method - description for details. +class ListRegionHealthCheckServicesRequest(proto.Message): + r"""A request message for RegionHealthCheckServices.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -81912,7 +85857,7 @@ class ListRegionDiskTypesRequest(proto.Message): project (str): Project ID for this request. region (str): - The name of the region for this request. 
+ Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -81961,8 +85906,8 @@ class ListRegionDiskTypesRequest(proto.Message): ) -class ListRegionDisksRequest(proto.Message): - r"""A request message for RegionDisks.List. See the method +class ListRegionHealthChecksRequest(proto.Message): + r"""A request message for RegionHealthChecks.List. See the method description for details. @@ -82075,7 +86020,170 @@ class ListRegionDisksRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region for this request. + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionHealthSourcesRequest(proto.Message): + r"""A request message for RegionHealthSources.List. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. 
For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. 
+ page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -82124,9 +86232,10 @@ class ListRegionDisksRequest(proto.Message): ) -class ListRegionHealthAggregationPoliciesRequest(proto.Message): - r"""A request message for RegionHealthAggregationPolicies.List. - See the method description for details. +class ListRegionInstanceGroupManagerResizeRequestsRequest(proto.Message): + r"""A request message for + RegionInstanceGroupManagerResizeRequests.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -82203,6 +86312,9 @@ class ListRegionHealthAggregationPoliciesRequest(proto.Message): regular expressions. This field is a member of `oneof`_ ``_filter``. + instance_group_manager (str): + The name of the managed instance group. The + name should conform to RFC1035. max_results (int): The maximum number of results per page that should be returned. If the number of available results is larger than @@ -82238,7 +86350,9 @@ class ListRegionHealthAggregationPoliciesRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + Name of the region + scoping this request. Name should conform to + RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -82257,6 +86371,10 @@ class ListRegionHealthAggregationPoliciesRequest(proto.Message): number=336120696, optional=True, ) + instance_group_manager: str = proto.Field( + proto.STRING, + number=249363395, + ) max_results: int = proto.Field( proto.UINT32, number=54715419, @@ -82287,9 +86405,9 @@ class ListRegionHealthAggregationPoliciesRequest(proto.Message): ) -class ListRegionHealthCheckServicesRequest(proto.Message): - r"""A request message for RegionHealthCheckServices.List. See the - method description for details. +class ListRegionInstanceGroupManagersRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.List. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -82450,9 +86568,9 @@ class ListRegionHealthCheckServicesRequest(proto.Message): ) -class ListRegionHealthChecksRequest(proto.Message): - r"""A request message for RegionHealthChecks.List. See the method - description for details. +class ListRegionInstanceGroupsRequest(proto.Message): + r"""A request message for RegionInstanceGroups.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -82613,8 +86731,8 @@ class ListRegionHealthChecksRequest(proto.Message): ) -class ListRegionHealthSourcesRequest(proto.Message): - r"""A request message for RegionHealthSources.List. See the +class ListRegionInstanceTemplatesRequest(proto.Message): + r"""A request message for RegionInstanceTemplates.List. See the method description for details. @@ -82727,7 +86845,7 @@ class ListRegionHealthSourcesRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + The name of the regions for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -82776,8 +86894,8 @@ class ListRegionHealthSourcesRequest(proto.Message): ) -class ListRegionInstanceGroupManagersRequest(proto.Message): - r"""A request message for RegionInstanceGroupManagers.List. See +class ListRegionInstantSnapshotGroupsRequest(proto.Message): + r"""A request message for RegionInstantSnapshotGroups.List. See the method description for details. @@ -82890,7 +87008,7 @@ class ListRegionInstanceGroupManagersRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + The name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -82939,8 +87057,8 @@ class ListRegionInstanceGroupManagersRequest(proto.Message): ) -class ListRegionInstanceGroupsRequest(proto.Message): - r"""A request message for RegionInstanceGroups.List. See the +class ListRegionInstantSnapshotsRequest(proto.Message): + r"""A request message for RegionInstantSnapshots.List. See the method description for details. @@ -83053,7 +87171,7 @@ class ListRegionInstanceGroupsRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + The name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -83102,9 +87220,9 @@ class ListRegionInstanceGroupsRequest(proto.Message): ) -class ListRegionInstanceTemplatesRequest(proto.Message): - r"""A request message for RegionInstanceTemplates.List. See the - method description for details. +class ListRegionNetworkEndpointGroupsRequest(proto.Message): + r"""A request message for RegionNetworkEndpointGroups.List. See + the method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -83216,7 +87334,9 @@ class ListRegionInstanceTemplatesRequest(proto.Message): project (str): Project ID for this request. region (str): - The name of the regions for this request. + The name of theregion + where the network endpoint group is located. It + should comply with RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -83265,9 +87385,9 @@ class ListRegionInstanceTemplatesRequest(proto.Message): ) -class ListRegionInstantSnapshotsRequest(proto.Message): - r"""A request message for RegionInstantSnapshots.List. See the - method description for details. +class ListRegionNetworkFirewallPoliciesRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.List. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -83379,7 +87499,7 @@ class ListRegionInstantSnapshotsRequest(proto.Message): project (str): Project ID for this request. region (str): - The name of the region for this request. + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -83428,8 +87548,8 @@ class ListRegionInstantSnapshotsRequest(proto.Message): ) -class ListRegionNetworkEndpointGroupsRequest(proto.Message): - r"""A request message for RegionNetworkEndpointGroups.List. See +class ListRegionNotificationEndpointsRequest(proto.Message): + r"""A request message for RegionNotificationEndpoints.List. See the method description for details. @@ -83542,9 +87662,7 @@ class ListRegionNetworkEndpointGroupsRequest(proto.Message): project (str): Project ID for this request. region (str): - The name of theregion - where the network endpoint group is located. It - should comply with RFC1035. 
+ Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -83593,9 +87711,9 @@ class ListRegionNetworkEndpointGroupsRequest(proto.Message): ) -class ListRegionNetworkFirewallPoliciesRequest(proto.Message): - r"""A request message for RegionNetworkFirewallPolicies.List. See - the method description for details. +class ListRegionOperationsRequest(proto.Message): + r"""A request message for RegionOperations.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -83707,7 +87825,7 @@ class ListRegionNetworkFirewallPoliciesRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -83756,9 +87874,9 @@ class ListRegionNetworkFirewallPoliciesRequest(proto.Message): ) -class ListRegionNotificationEndpointsRequest(proto.Message): - r"""A request message for RegionNotificationEndpoints.List. See - the method description for details. +class ListRegionSecurityPoliciesRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -83919,8 +88037,8 @@ class ListRegionNotificationEndpointsRequest(proto.Message): ) -class ListRegionOperationsRequest(proto.Message): - r"""A request message for RegionOperations.List. See the method +class ListRegionSnapshotsRequest(proto.Message): + r"""A request message for RegionSnapshots.List. See the method description for details. 
@@ -84082,8 +88200,8 @@ class ListRegionOperationsRequest(proto.Message): ) -class ListRegionSecurityPoliciesRequest(proto.Message): - r"""A request message for RegionSecurityPolicies.List. See the +class ListRegionSslCertificatesRequest(proto.Message): + r"""A request message for RegionSslCertificates.List. See the method description for details. @@ -84245,8 +88363,171 @@ class ListRegionSecurityPoliciesRequest(proto.Message): ) -class ListRegionSslCertificatesRequest(proto.Message): - r"""A request message for RegionSslCertificates.List. See the +class ListRegionSslPoliciesRequest(proto.Message): + r"""A request message for RegionSslPolicies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. 
You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. 
+ + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListRegionTargetHttpProxiesRequest(proto.Message): + r"""A request message for RegionTargetHttpProxies.List. See the method description for details. 
@@ -84408,9 +88689,9 @@ class ListRegionSslCertificatesRequest(proto.Message): ) -class ListRegionSslPoliciesRequest(proto.Message): - r"""A request message for RegionSslPolicies.List. See the method - description for details. +class ListRegionTargetHttpsProxiesRequest(proto.Message): + r"""A request message for RegionTargetHttpsProxies.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -84571,8 +88852,8 @@ class ListRegionSslPoliciesRequest(proto.Message): ) -class ListRegionTargetHttpProxiesRequest(proto.Message): - r"""A request message for RegionTargetHttpProxies.List. See the +class ListRegionTargetTcpProxiesRequest(proto.Message): + r"""A request message for RegionTargetTcpProxies.List. See the method description for details. @@ -84734,9 +89015,9 @@ class ListRegionTargetHttpProxiesRequest(proto.Message): ) -class ListRegionTargetHttpsProxiesRequest(proto.Message): - r"""A request message for RegionTargetHttpsProxies.List. See the - method description for details. +class ListRegionUrlMapsRequest(proto.Message): + r"""A request message for RegionUrlMaps.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -84897,9 +89178,9 @@ class ListRegionTargetHttpsProxiesRequest(proto.Message): ) -class ListRegionTargetTcpProxiesRequest(proto.Message): - r"""A request message for RegionTargetTcpProxies.List. See the - method description for details. +class ListRegionZonesRequest(proto.Message): + r"""A request message for RegionZones.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -85011,7 +89292,7 @@ class ListRegionTargetTcpProxiesRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. 
+ Region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -85060,8 +89341,8 @@ class ListRegionTargetTcpProxiesRequest(proto.Message): ) -class ListRegionUrlMapsRequest(proto.Message): - r"""A request message for RegionUrlMaps.List. See the method +class ListRegionsRequest(proto.Message): + r"""A request message for Regions.List. See the method description for details. @@ -85173,8 +89454,6 @@ class ListRegionUrlMapsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -85212,10 +89491,6 @@ class ListRegionUrlMapsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -85223,8 +89498,8 @@ class ListRegionUrlMapsRequest(proto.Message): ) -class ListRegionZonesRequest(proto.Message): - r"""A request message for RegionZones.List. See the method +class ListReservationBlocksRequest(proto.Message): + r"""A request message for ReservationBlocks.List. See the method description for details. @@ -85336,8 +89611,10 @@ class ListRegionZonesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Region for this request. + reservation (str): + The name of the reservation. + Name should conform to RFC1035 or be a resource + ID. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -85349,6 +89626,9 @@ class ListRegionZonesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. 
+ zone (str): + Name of the zone for this request. Zone name + should conform to RFC1035. """ filter: str = proto.Field( @@ -85375,19 +89655,23 @@ class ListRegionZonesRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( + reservation: str = proto.Field( proto.STRING, - number=138946292, + number=47530956, ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListRegionsRequest(proto.Message): - r"""A request message for Regions.List. See the method +class ListReservationSlotsRequest(proto.Message): + r"""A request message for ReservationSlots.List. See the method description for details. @@ -85497,8 +89781,12 @@ class ListRegionsRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. + parent_name (str): + The name of the parent reservation and parent block, + formatted as + reservations/{reservation_name}/reservationBlocks/{reservation_block_name}/reservationSubBlocks/{reservation_sub_block_name} project (str): - Project ID for this request. + The project ID for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -85510,6 +89798,9 @@ class ListRegionsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request, + formatted as RFC1035. 
""" filter: str = proto.Field( @@ -85532,6 +89823,10 @@ class ListRegionsRequest(proto.Message): number=19994697, optional=True, ) + parent_name: str = proto.Field( + proto.STRING, + number=478151936, + ) project: str = proto.Field( proto.STRING, number=227560217, @@ -85541,11 +89836,15 @@ class ListRegionsRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListReservationBlocksRequest(proto.Message): - r"""A request message for ReservationBlocks.List. See the method - description for details. +class ListReservationSubBlocksRequest(proto.Message): + r"""A request message for ReservationSubBlocks.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -85654,12 +89953,12 @@ class ListReservationBlocksRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. + parent_name (str): + The name of the parent reservation and parent block. In the + format of + reservations/{reservation_name}/reservationBlocks/{reservation_block_name} project (str): Project ID for this request. - reservation (str): - The name of the reservation. - Name should conform to RFC1035 or be a resource - ID. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -85676,6 +89975,173 @@ class ListReservationBlocksRequest(proto.Message): should conform to RFC1035. 
""" + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + parent_name: str = proto.Field( + proto.STRING, + number=478151936, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class ListReservationsRequest(proto.Message): + r"""A request message for Reservations.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. 
For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. 
By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. + return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone for this request. + """ + filter: str = proto.Field( proto.STRING, number=336120696, @@ -85700,10 +90166,6 @@ class ListReservationBlocksRequest(proto.Message): proto.STRING, number=227560217, ) - reservation: str = proto.Field( - proto.STRING, - number=47530956, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -85715,8 +90177,8 @@ class ListReservationBlocksRequest(proto.Message): ) -class ListReservationSlotsRequest(proto.Message): - r"""A request message for ReservationSlots.List. See the method +class ListResourcePoliciesRequest(proto.Message): + r"""A request message for ResourcePolicies.List. See the method description for details. 
@@ -85826,12 +90288,10 @@ class ListReservationSlotsRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - parent_name (str): - The name of the parent reservation and parent block, - formatted as - reservations/{reservation_name}/reservationBlocks/{reservation_block_name}/reservationSubBlocks/{reservation_sub_block_name} project (str): - The project ID for this request. + Project ID for this request. + region (str): + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -85843,9 +90303,6 @@ class ListReservationSlotsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request, - formatted as RFC1035. """ filter: str = proto.Field( @@ -85868,27 +90325,23 @@ class ListReservationSlotsRequest(proto.Message): number=19994697, optional=True, ) - parent_name: str = proto.Field( - proto.STRING, - number=478151936, - ) project: str = proto.Field( proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListReservationSubBlocksRequest(proto.Message): - r"""A request message for ReservationSubBlocks.List. See the +class ListRoutePoliciesRoutersRequest(proto.Message): + r"""A request message for Routers.ListRoutePolicies. See the method description for details. @@ -85998,12 +90451,10 @@ class ListReservationSubBlocksRequest(proto.Message): the next page of results. This field is a member of `oneof`_ ``_page_token``. - parent_name (str): - The name of the parent reservation and parent block. 
In the - format of - reservations/{reservation_name}/reservationBlocks/{reservation_block_name} project (str): Project ID for this request. + region (str): + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -86015,9 +90466,9 @@ class ListReservationSubBlocksRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - Name of the zone for this request. Zone name - should conform to RFC1035. + router (str): + Name or id of the resource for this request. + Name should conform to RFC1035. """ filter: str = proto.Field( @@ -86040,190 +90491,27 @@ class ListReservationSubBlocksRequest(proto.Message): number=19994697, optional=True, ) - parent_name: str = proto.Field( - proto.STRING, - number=478151936, - ) project: str = proto.Field( proto.STRING, number=227560217, ) - return_partial_success: bool = proto.Field( - proto.BOOL, - number=517198390, - optional=True, - ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) - - -class ListReservationsRequest(proto.Message): - r"""A request message for Reservations.List. See the method - description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - filter (str): - A filter expression that filters resources listed in the - response. Most Compute resources support two types of filter - expressions: expressions that support regular expressions - and expressions that follow API improvement proposal - AIP-160. These two types of filter expressions cannot be - mixed in one request. - - If you want to use AIP-160, your expression must specify the - field name, an operator, and the value that you want to use - for filtering. The value must be a string, a number, or a - boolean. 
The operator must be either ``=``, ``!=``, ``>``, - ``<``, ``<=``, ``>=`` or ``:``. - - For example, if you are filtering Compute Engine instances, - you can exclude instances named ``example-instance`` by - specifying ``name != example-instance``. - - The ``:*`` comparison can be used to test whether a key has - been defined. For example, to find all objects with - ``owner`` label use: - - :: - - labels.owner:* - - You can also filter nested fields. For example, you could - specify ``scheduling.automaticRestart = false`` to include - instances only if they are not scheduled for automatic - restarts. You can use filtering on nested fields to filter - based onresource labels. - - To filter on multiple expressions, provide each separate - expression within parentheses. For example: - - :: - - (scheduling.automaticRestart = true) - (cpuPlatform = "Intel Skylake") - - By default, each expression is an ``AND`` expression. - However, you can include ``AND`` and ``OR`` expressions - explicitly. For example: - - :: - - (cpuPlatform = "Intel Skylake") OR - (cpuPlatform = "Intel Broadwell") AND - (scheduling.automaticRestart = true) - - If you want to use a regular expression, use the ``eq`` - (equal) or ``ne`` (not equal) operator against a single - un-parenthesized expression with or without quotes or - against multiple parenthesized expressions. Examples: - - ``fieldname eq unquoted literal`` - ``fieldname eq 'single quoted literal'`` - ``fieldname eq "double quoted literal"`` - ``(fieldname1 eq literal) (fieldname2 ne "literal")`` - - The literal value is interpreted as a regular expression - using GoogleRE2 library syntax. The literal value must match - the entire field. - - For example, to filter for instances that do not end with - name "instance", you would use ``name ne .*instance``. - - You cannot combine constraints on multiple fields using - regular expressions. - - This field is a member of `oneof`_ ``_filter``. 
- max_results (int): - The maximum number of results per page that should be - returned. If the number of available results is larger than - ``maxResults``, Compute Engine returns a ``nextPageToken`` - that can be used to get the next page of results in - subsequent list requests. Acceptable values are ``0`` to - ``500``, inclusive. (Default: ``500``) - - This field is a member of `oneof`_ ``_max_results``. - order_by (str): - Sorts list results by a certain order. By default, results - are returned in alphanumerical order based on the resource - name. - - You can also sort results in descending order based on the - creation timestamp using - ``orderBy="creationTimestamp desc"``. This sorts results - based on the ``creationTimestamp`` field in reverse - chronological order (newest result first). Use this to sort - resources like operations so that the newest operation is - returned first. - - Currently, only sorting by ``name`` or - ``creationTimestamp desc`` is supported. - - This field is a member of `oneof`_ ``_order_by``. - page_token (str): - Specifies a page token to use. Set ``pageToken`` to the - ``nextPageToken`` returned by a previous list request to get - the next page of results. - - This field is a member of `oneof`_ ``_page_token``. - project (str): - Project ID for this request. - return_partial_success (bool): - Opt-in for partial success behavior which - provides partial results in case of failure. The - default value is false. - - For example, when partial success behavior is - enabled, aggregatedList for a single zone scope - either returns all resources in the zone or no - resources, with an error code. - - This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - Name of the zone for this request. 
- """ - - filter: str = proto.Field( - proto.STRING, - number=336120696, - optional=True, - ) - max_results: int = proto.Field( - proto.UINT32, - number=54715419, - optional=True, - ) - order_by: str = proto.Field( - proto.STRING, - number=160562920, - optional=True, - ) - page_token: str = proto.Field( - proto.STRING, - number=19994697, - optional=True, - ) - project: str = proto.Field( + region: str = proto.Field( proto.STRING, - number=227560217, + number=138946292, ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - zone: str = proto.Field( + router: str = proto.Field( proto.STRING, - number=3744684, + number=148608841, ) -class ListResourcePoliciesRequest(proto.Message): - r"""A request message for ResourcePolicies.List. See the method +class ListRoutersRequest(proto.Message): + r"""A request message for Routers.List. See the method description for details. @@ -86385,9 +90673,9 @@ class ListResourcePoliciesRequest(proto.Message): ) -class ListRoutePoliciesRoutersRequest(proto.Message): - r"""A request message for Routers.ListRoutePolicies. See the - method description for details. +class ListRoutesRequest(proto.Message): + r"""A request message for Routes.List. See the method description + for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -86498,8 +90786,6 @@ class ListRoutePoliciesRoutersRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -86511,9 +90797,6 @@ class ListRoutePoliciesRoutersRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - router (str): - Name or id of the resource for this request. - Name should conform to RFC1035. 
""" filter: str = proto.Field( @@ -86540,23 +90823,15 @@ class ListRoutePoliciesRoutersRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - router: str = proto.Field( - proto.STRING, - number=148608841, - ) -class ListRoutersRequest(proto.Message): - r"""A request message for Routers.List. See the method +class ListSecurityPoliciesRequest(proto.Message): + r"""A request message for SecurityPolicies.List. See the method description for details. @@ -86668,8 +90943,6 @@ class ListRoutersRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -86707,10 +90980,6 @@ class ListRoutersRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -86718,9 +90987,9 @@ class ListRoutersRequest(proto.Message): ) -class ListRoutesRequest(proto.Message): - r"""A request message for Routes.List. See the method description - for details. +class ListServiceAttachmentsRequest(proto.Message): + r"""A request message for ServiceAttachments.List. See the method + description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -86831,6 +91100,8 @@ class ListRoutesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region of this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -86868,6 +91139,10 @@ class ListRoutesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -86875,8 +91150,8 @@ class ListRoutesRequest(proto.Message): ) -class ListSecurityPoliciesRequest(proto.Message): - r"""A request message for SecurityPolicies.List. See the method +class ListSnapshotsRequest(proto.Message): + r"""A request message for Snapshots.List. See the method description for details. @@ -87032,8 +91307,8 @@ class ListSecurityPoliciesRequest(proto.Message): ) -class ListServiceAttachmentsRequest(proto.Message): - r"""A request message for ServiceAttachments.List. See the method +class ListSslCertificatesRequest(proto.Message): + r"""A request message for SslCertificates.List. See the method description for details. @@ -87145,8 +91420,6 @@ class ListServiceAttachmentsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region of this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -87184,10 +91457,6 @@ class ListServiceAttachmentsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -87195,8 +91464,8 @@ class ListServiceAttachmentsRequest(proto.Message): ) -class ListSnapshotsRequest(proto.Message): - r"""A request message for Snapshots.List. See the method +class ListSslPoliciesRequest(proto.Message): + r"""A request message for SslPolicies.List. See the method description for details. @@ -87352,8 +91621,8 @@ class ListSnapshotsRequest(proto.Message): ) -class ListSslCertificatesRequest(proto.Message): - r"""A request message for SslCertificates.List. 
See the method +class ListStoragePoolTypesRequest(proto.Message): + r"""A request message for StoragePoolTypes.List. See the method description for details. @@ -87476,6 +91745,8 @@ class ListSslCertificatesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. """ filter: str = proto.Field( @@ -87507,10 +91778,14 @@ class ListSslCertificatesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListSslPoliciesRequest(proto.Message): - r"""A request message for SslPolicies.List. See the method +class ListStoragePoolsRequest(proto.Message): + r"""A request message for StoragePools.List. See the method description for details. @@ -87633,6 +91908,8 @@ class ListSslPoliciesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + The name of the zone for this request. """ filter: str = proto.Field( @@ -87664,10 +91941,14 @@ class ListSslPoliciesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListStoragePoolTypesRequest(proto.Message): - r"""A request message for StoragePoolTypes.List. See the method +class ListSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.List. See the method description for details. @@ -87779,6 +92060,8 @@ class ListStoragePoolTypesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -87790,10 +92073,43 @@ class ListStoragePoolTypesRequest(proto.Message): resources, with an error code. 
This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. + views (str): + Defines the extra views returned back in the subnetwork + resource. Supported values: + + :: + + - WITH_UTILIZATION: Utilization data is included in the + response. + + Check the Views enum for the list of possible values. + + This field is a member of `oneof`_ ``_views``. """ + class Views(proto.Enum): + r"""Defines the extra views returned back in the subnetwork resource. + Supported values: + + :: + + - WITH_UTILIZATION: Utilization data is included in the + response. + + Values: + UNDEFINED_VIEWS (0): + A value indicating that the enum field is not + set. + DEFAULT (115302945): + No description available. + WITH_UTILIZATION (504090633): + Utilization data is included in the response. + """ + + UNDEFINED_VIEWS = 0 + DEFAULT = 115302945 + WITH_UTILIZATION = 504090633 + filter: str = proto.Field( proto.STRING, number=336120696, @@ -87818,19 +92134,181 @@ class ListStoragePoolTypesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - zone: str = proto.Field( + views: str = proto.Field( proto.STRING, - number=3744684, + number=112204398, + optional=True, ) -class ListStoragePoolsRequest(proto.Message): - r"""A request message for StoragePools.List. See the method +class ListTargetGrpcProxiesRequest(proto.Message): + r"""A request message for TargetGrpcProxies.List. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + filter (str): + A filter expression that filters resources listed in the + response. 
Most Compute resources support two types of filter + expressions: expressions that support regular expressions + and expressions that follow API improvement proposal + AIP-160. These two types of filter expressions cannot be + mixed in one request. + + If you want to use AIP-160, your expression must specify the + field name, an operator, and the value that you want to use + for filtering. The value must be a string, a number, or a + boolean. The operator must be either ``=``, ``!=``, ``>``, + ``<``, ``<=``, ``>=`` or ``:``. + + For example, if you are filtering Compute Engine instances, + you can exclude instances named ``example-instance`` by + specifying ``name != example-instance``. + + The ``:*`` comparison can be used to test whether a key has + been defined. For example, to find all objects with + ``owner`` label use: + + :: + + labels.owner:* + + You can also filter nested fields. For example, you could + specify ``scheduling.automaticRestart = false`` to include + instances only if they are not scheduled for automatic + restarts. You can use filtering on nested fields to filter + based onresource labels. + + To filter on multiple expressions, provide each separate + expression within parentheses. For example: + + :: + + (scheduling.automaticRestart = true) + (cpuPlatform = "Intel Skylake") + + By default, each expression is an ``AND`` expression. + However, you can include ``AND`` and ``OR`` expressions + explicitly. For example: + + :: + + (cpuPlatform = "Intel Skylake") OR + (cpuPlatform = "Intel Broadwell") AND + (scheduling.automaticRestart = true) + + If you want to use a regular expression, use the ``eq`` + (equal) or ``ne`` (not equal) operator against a single + un-parenthesized expression with or without quotes or + against multiple parenthesized expressions. 
Examples: + + ``fieldname eq unquoted literal`` + ``fieldname eq 'single quoted literal'`` + ``fieldname eq "double quoted literal"`` + ``(fieldname1 eq literal) (fieldname2 ne "literal")`` + + The literal value is interpreted as a regular expression + using GoogleRE2 library syntax. The literal value must match + the entire field. + + For example, to filter for instances that do not end with + name "instance", you would use ``name ne .*instance``. + + You cannot combine constraints on multiple fields using + regular expressions. + + This field is a member of `oneof`_ ``_filter``. + max_results (int): + The maximum number of results per page that should be + returned. If the number of available results is larger than + ``maxResults``, Compute Engine returns a ``nextPageToken`` + that can be used to get the next page of results in + subsequent list requests. Acceptable values are ``0`` to + ``500``, inclusive. (Default: ``500``) + + This field is a member of `oneof`_ ``_max_results``. + order_by (str): + Sorts list results by a certain order. By default, results + are returned in alphanumerical order based on the resource + name. + + You can also sort results in descending order based on the + creation timestamp using + ``orderBy="creationTimestamp desc"``. This sorts results + based on the ``creationTimestamp`` field in reverse + chronological order (newest result first). Use this to sort + resources like operations so that the newest operation is + returned first. + + Currently, only sorting by ``name`` or + ``creationTimestamp desc`` is supported. + + This field is a member of `oneof`_ ``_order_by``. + page_token (str): + Specifies a page token to use. Set ``pageToken`` to the + ``nextPageToken`` returned by a previous list request to get + the next page of results. + + This field is a member of `oneof`_ ``_page_token``. + project (str): + Project ID for this request. 
+ return_partial_success (bool): + Opt-in for partial success behavior which + provides partial results in case of failure. The + default value is false. + + For example, when partial success behavior is + enabled, aggregatedList for a single zone scope + either returns all resources in the zone or no + resources, with an error code. + + This field is a member of `oneof`_ ``_return_partial_success``. + """ + + filter: str = proto.Field( + proto.STRING, + number=336120696, + optional=True, + ) + max_results: int = proto.Field( + proto.UINT32, + number=54715419, + optional=True, + ) + order_by: str = proto.Field( + proto.STRING, + number=160562920, + optional=True, + ) + page_token: str = proto.Field( + proto.STRING, + number=19994697, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + return_partial_success: bool = proto.Field( + proto.BOOL, + number=517198390, + optional=True, + ) + + +class ListTargetHttpProxiesRequest(proto.Message): + r"""A request message for TargetHttpProxies.List. See the method description for details. @@ -87953,8 +92431,6 @@ class ListStoragePoolsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - The name of the zone for this request. """ filter: str = proto.Field( @@ -87986,14 +92462,10 @@ class ListStoragePoolsRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListSubnetworksRequest(proto.Message): - r"""A request message for Subnetworks.List. See the method +class ListTargetHttpsProxiesRequest(proto.Message): + r"""A request message for TargetHttpsProxies.List. See the method description for details. @@ -88105,8 +92577,6 @@ class ListSubnetworksRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. 
return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -88118,43 +92588,8 @@ class ListSubnetworksRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - views (str): - Defines the extra views returned back in the subnetwork - resource. Supported values: - - :: - - - WITH_UTILIZATION: Utilization data is included in the - response. - - Check the Views enum for the list of possible values. - - This field is a member of `oneof`_ ``_views``. """ - class Views(proto.Enum): - r"""Defines the extra views returned back in the subnetwork resource. - Supported values: - - :: - - - WITH_UTILIZATION: Utilization data is included in the - response. - - Values: - UNDEFINED_VIEWS (0): - A value indicating that the enum field is not - set. - DEFAULT (115302945): - No description available. - WITH_UTILIZATION (504090633): - Utilization data is included in the response. - """ - - UNDEFINED_VIEWS = 0 - DEFAULT = 115302945 - WITH_UTILIZATION = 504090633 - filter: str = proto.Field( proto.STRING, number=336120696, @@ -88179,24 +92614,15 @@ class Views(proto.Enum): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - views: str = proto.Field( - proto.STRING, - number=112204398, - optional=True, - ) -class ListTargetGrpcProxiesRequest(proto.Message): - r"""A request message for TargetGrpcProxies.List. See the method +class ListTargetInstancesRequest(proto.Message): + r"""A request message for TargetInstances.List. See the method description for details. @@ -88319,6 +92745,8 @@ class ListTargetGrpcProxiesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone scoping this request. 
""" filter: str = proto.Field( @@ -88350,10 +92778,14 @@ class ListTargetGrpcProxiesRequest(proto.Message): number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListTargetHttpProxiesRequest(proto.Message): - r"""A request message for TargetHttpProxies.List. See the method +class ListTargetPoolsRequest(proto.Message): + r"""A request message for TargetPools.List. See the method description for details. @@ -88465,6 +92897,8 @@ class ListTargetHttpProxiesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -88502,6 +92936,10 @@ class ListTargetHttpProxiesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -88509,8 +92947,8 @@ class ListTargetHttpProxiesRequest(proto.Message): ) -class ListTargetHttpsProxiesRequest(proto.Message): - r"""A request message for TargetHttpsProxies.List. See the method +class ListTargetSslProxiesRequest(proto.Message): + r"""A request message for TargetSslProxies.List. See the method description for details. @@ -88666,8 +93104,8 @@ class ListTargetHttpsProxiesRequest(proto.Message): ) -class ListTargetInstancesRequest(proto.Message): - r"""A request message for TargetInstances.List. See the method +class ListTargetTcpProxiesRequest(proto.Message): + r"""A request message for TargetTcpProxies.List. See the method description for details. @@ -88790,8 +93228,6 @@ class ListTargetInstancesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - zone (str): - Name of the zone scoping this request. 
""" filter: str = proto.Field( @@ -88823,14 +93259,10 @@ class ListTargetInstancesRequest(proto.Message): number=517198390, optional=True, ) - zone: str = proto.Field( - proto.STRING, - number=3744684, - ) -class ListTargetPoolsRequest(proto.Message): - r"""A request message for TargetPools.List. See the method +class ListTargetVpnGatewaysRequest(proto.Message): + r"""A request message for TargetVpnGateways.List. See the method description for details. @@ -88943,7 +93375,7 @@ class ListTargetPoolsRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -88992,8 +93424,8 @@ class ListTargetPoolsRequest(proto.Message): ) -class ListTargetSslProxiesRequest(proto.Message): - r"""A request message for TargetSslProxies.List. See the method +class ListUrlMapsRequest(proto.Message): + r"""A request message for UrlMaps.List. See the method description for details. @@ -89149,9 +93581,9 @@ class ListTargetSslProxiesRequest(proto.Message): ) -class ListTargetTcpProxiesRequest(proto.Message): - r"""A request message for TargetTcpProxies.List. See the method - description for details. +class ListUsableBackendBucketsRequest(proto.Message): + r"""A request message for BackendBuckets.ListUsable. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -89306,9 +93738,9 @@ class ListTargetTcpProxiesRequest(proto.Message): ) -class ListTargetVpnGatewaysRequest(proto.Message): - r"""A request message for TargetVpnGateways.List. See the method - description for details. +class ListUsableBackendServicesRequest(proto.Message): + r"""A request message for BackendServices.ListUsable. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -89419,8 +93851,6 @@ class ListTargetVpnGatewaysRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -89458,10 +93888,6 @@ class ListTargetVpnGatewaysRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -89469,9 +93895,9 @@ class ListTargetVpnGatewaysRequest(proto.Message): ) -class ListUrlMapsRequest(proto.Message): - r"""A request message for UrlMaps.List. See the method - description for details. +class ListUsableRegionBackendBucketsRequest(proto.Message): + r"""A request message for RegionBackendBuckets.ListUsable. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -89582,6 +94008,10 @@ class ListUrlMapsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. + It must be a string that meets the requirements + in RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -89619,6 +94049,10 @@ class ListUrlMapsRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -89626,9 +94060,9 @@ class ListUrlMapsRequest(proto.Message): ) -class ListUsableBackendServicesRequest(proto.Message): - r"""A request message for BackendServices.ListUsable. See the - method description for details. +class ListUsableRegionBackendServicesRequest(proto.Message): + r"""A request message for RegionBackendServices.ListUsable. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -89739,6 +94173,10 @@ class ListUsableBackendServicesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region scoping this request. + It must be a string that meets the requirements + in RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -89776,6 +94214,10 @@ class ListUsableBackendServicesRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -89783,9 +94225,9 @@ class ListUsableBackendServicesRequest(proto.Message): ) -class ListUsableRegionBackendServicesRequest(proto.Message): - r"""A request message for RegionBackendServices.ListUsable. See - the method description for details. +class ListUsableSubnetworksRequest(proto.Message): + r"""A request message for Subnetworks.ListUsable. See the method + description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -89896,10 +94338,6 @@ class ListUsableRegionBackendServicesRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region scoping this request. - It must be a string that meets the requirements - in RFC1035. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -89911,6 +94349,13 @@ class ListUsableRegionBackendServicesRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + service_project (str): + The project id or project number in which the subnetwork is + intended to be used. Only applied for Shared VPC. See + `Shared VPC + documentation `__ + + This field is a member of `oneof`_ ``_service_project``. """ filter: str = proto.Field( @@ -89937,19 +94382,20 @@ class ListUsableRegionBackendServicesRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + service_project: str = proto.Field( + proto.STRING, + number=530592655, + optional=True, + ) -class ListUsableSubnetworksRequest(proto.Message): - r"""A request message for Subnetworks.ListUsable. See the method +class ListVpnGatewaysRequest(proto.Message): + r"""A request message for VpnGateways.List. See the method description for details. @@ -90061,6 +94507,8 @@ class ListUsableSubnetworksRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + region (str): + Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -90072,13 +94520,6 @@ class ListUsableSubnetworksRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. - service_project (str): - The project id or project number in which the subnetwork is - intended to be used. Only applied for Shared VPC. See - `Shared VPC - documentation `__ - - This field is a member of `oneof`_ ``_service_project``. """ filter: str = proto.Field( @@ -90105,20 +94546,19 @@ class ListUsableSubnetworksRequest(proto.Message): proto.STRING, number=227560217, ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) - service_project: str = proto.Field( - proto.STRING, - number=530592655, - optional=True, - ) -class ListVpnGatewaysRequest(proto.Message): - r"""A request message for VpnGateways.List. See the method +class ListVpnTunnelsRequest(proto.Message): + r"""A request message for VpnTunnels.List. See the method description for details. @@ -90280,14 +94720,16 @@ class ListVpnGatewaysRequest(proto.Message): ) -class ListVpnTunnelsRequest(proto.Message): - r"""A request message for VpnTunnels.List. See the method +class ListWireGroupsRequest(proto.Message): + r"""A request message for WireGroups.List. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + cross_site_network (str): + filter (str): A filter expression that filters resources listed in the response. Most Compute resources support two types of filter @@ -90393,8 +94835,6 @@ class ListVpnTunnelsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - region (str): - Name of the region for this request. return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. 
The @@ -90408,6 +94848,10 @@ class ListVpnTunnelsRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. """ + cross_site_network: str = proto.Field( + proto.STRING, + number=108192469, + ) filter: str = proto.Field( proto.STRING, number=336120696, @@ -90432,10 +94876,6 @@ class ListVpnTunnelsRequest(proto.Message): proto.STRING, number=227560217, ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -90443,16 +94883,14 @@ class ListVpnTunnelsRequest(proto.Message): ) -class ListWireGroupsRequest(proto.Message): - r"""A request message for WireGroups.List. See the method +class ListXpnHostsProjectsRequest(proto.Message): + r"""A request message for Projects.ListXpnHosts. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - cross_site_network (str): - filter (str): A filter expression that filters resources listed in the response. Most Compute resources support two types of filter @@ -90558,6 +94996,8 @@ class ListWireGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. + projects_list_xpn_hosts_request_resource (google.cloud.compute_v1.types.ProjectsListXpnHostsRequest): + The body resource for this request return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -90571,10 +95011,6 @@ class ListWireGroupsRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. 
""" - cross_site_network: str = proto.Field( - proto.STRING, - number=108192469, - ) filter: str = proto.Field( proto.STRING, number=336120696, @@ -90599,6 +95035,13 @@ class ListWireGroupsRequest(proto.Message): proto.STRING, number=227560217, ) + projects_list_xpn_hosts_request_resource: "ProjectsListXpnHostsRequest" = ( + proto.Field( + proto.MESSAGE, + number=238266391, + message="ProjectsListXpnHostsRequest", + ) + ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, @@ -90606,8 +95049,8 @@ class ListWireGroupsRequest(proto.Message): ) -class ListXpnHostsProjectsRequest(proto.Message): - r"""A request message for Projects.ListXpnHosts. See the method +class ListZoneOperationsRequest(proto.Message): + r"""A request message for ZoneOperations.List. See the method description for details. @@ -90719,8 +95162,6 @@ class ListXpnHostsProjectsRequest(proto.Message): This field is a member of `oneof`_ ``_page_token``. project (str): Project ID for this request. - projects_list_xpn_hosts_request_resource (google.cloud.compute_v1.types.ProjectsListXpnHostsRequest): - The body resource for this request return_partial_success (bool): Opt-in for partial success behavior which provides partial results in case of failure. The @@ -90732,6 +95173,8 @@ class ListXpnHostsProjectsRequest(proto.Message): resources, with an error code. This field is a member of `oneof`_ ``_return_partial_success``. + zone (str): + Name of the zone for request. 
""" filter: str = proto.Field( @@ -90758,23 +95201,20 @@ class ListXpnHostsProjectsRequest(proto.Message): proto.STRING, number=227560217, ) - projects_list_xpn_hosts_request_resource: "ProjectsListXpnHostsRequest" = ( - proto.Field( - proto.MESSAGE, - number=238266391, - message="ProjectsListXpnHostsRequest", - ) - ) return_partial_success: bool = proto.Field( proto.BOOL, number=517198390, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class ListZoneOperationsRequest(proto.Message): - r"""A request message for ZoneOperations.List. See the method - description for details. +class ListZoneVmExtensionPoliciesRequest(proto.Message): + r"""A request message for ZoneVmExtensionPolicies.List. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields @@ -90897,7 +95337,7 @@ class ListZoneOperationsRequest(proto.Message): This field is a member of `oneof`_ ``_return_partial_success``. zone (str): - Name of the zone for request. + Name of the zone for this request. """ filter: str = proto.Field( @@ -93854,6 +98294,11 @@ class NetworkAttachmentConnectedEndpoint(proto.Message): This field is a member of `oneof`_ ``_project_id_or_num``. secondary_ip_cidr_ranges (MutableSequence[str]): Alias IP ranges from the same subnetwork. + service_class_id (str): + The service class id of the producer service + to which the IP was assigned. + + This field is a member of `oneof`_ ``_service_class_id``. status (str): The status of a connected endpoint to this network attachment. Check the Status enum for @@ -93926,6 +98371,11 @@ class Status(proto.Enum): proto.STRING, number=117184788, ) + service_class_id: str = proto.Field( + proto.STRING, + number=422763404, + optional=True, + ) status: str = proto.Field( proto.STRING, number=181260274, @@ -95583,6 +100033,14 @@ class NetworkInterface(proto.Message): empty if not specified by the users. 
This field is a member of `oneof`_ ``_queue_count``. + service_class_id (str): + Optional. Producer Service's Service class Id for the region + of this network interface. Can only be used with + network_attachment. It is not possible to use on its own + however, network_attachment can be used without + service_class_id. + + This field is a member of `oneof`_ ``_service_class_id``. stack_type (str): The stack type for this network interface. To assign only IPv4 addresses, use IPV4_ONLY. To assign both IPv4 and IPv6 @@ -95812,6 +100270,11 @@ class StackType(proto.Enum): number=503708769, optional=True, ) + service_class_id: str = proto.Field( + proto.STRING, + number=422763404, + optional=True, + ) stack_type: str = proto.Field( proto.STRING, number=425908881, @@ -103884,6 +108347,67 @@ class PatchRegionAutoscalerRequest(proto.Message): ) +class PatchRegionBackendBucketRequest(proto.Message): + r"""A request message for RegionBackendBuckets.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + backend_bucket (str): + Name of the BackendBucket resource to patch. + backend_bucket_resource (google.cloud.compute_v1.types.BackendBucket): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. Specify a + unique request ID so that if you must retry your request, + the server will know to ignore the request if it has already + been completed. + + For example, consider a situation where you make an initial + request and the request times out. If you make the request + again with the same request ID, the server can check if + original operation with the same request ID was received, + and if so, will ignore the second request. This prevents + clients from accidentally creating duplicate commitments. 
+ + The request ID must be a valid UUID with the exception that + zero UUID is not supported + (00000000-0000-0000-0000-000000000000). end_interface: + MixerMutationRequestBuilder + + This field is a member of `oneof`_ ``_request_id``. + """ + + backend_bucket: str = proto.Field( + proto.STRING, + number=91714037, + ) + backend_bucket_resource: "BackendBucket" = proto.Field( + proto.MESSAGE, + number=380757784, + message="BackendBucket", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + class PatchRegionBackendServiceRequest(proto.Message): r"""A request message for RegionBackendServices.Patch. See the method description for details. @@ -103990,14 +108514,210 @@ class PatchRegionCompositeHealthCheckRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - composite_health_check: str = proto.Field( + composite_health_check: str = proto.Field( + proto.STRING, + number=466984989, + ) + composite_health_check_resource: "CompositeHealthCheck" = proto.Field( + proto.MESSAGE, + number=132195824, + message="CompositeHealthCheck", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionHealthAggregationPolicyRequest(proto.Message): + r"""A request message for RegionHealthAggregationPolicies.Patch. + See the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_aggregation_policy (str): + Name of the HealthAggregationPolicy to + update. The name must be 1-63 characters long, + and comply with RFC1035. 
+ health_aggregation_policy_resource (google.cloud.compute_v1.types.HealthAggregationPolicy): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + health_aggregation_policy: str = proto.Field( + proto.STRING, + number=240314354, + ) + health_aggregation_policy_resource: "HealthAggregationPolicy" = proto.Field( + proto.MESSAGE, + number=293526971, + message="HealthAggregationPolicy", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionHealthCheckRequest(proto.Message): + r"""A request message for RegionHealthChecks.Patch. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check (str): + Name of the HealthCheck resource to patch. 
+ health_check_resource (google.cloud.compute_v1.types.HealthCheck): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + health_check_resource: "HealthCheck" = proto.Field( + proto.MESSAGE, + number=201925032, + message="HealthCheck", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class PatchRegionHealthCheckServiceRequest(proto.Message): + r"""A request message for RegionHealthCheckServices.Patch. See + the method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + health_check_service (str): + Name of the HealthCheckService to update. The + name must be 1-63 characters long, and comply + with RFC1035. 
+ health_check_service_resource (google.cloud.compute_v1.types.HealthCheckService): + The body resource for this request + project (str): + Project ID for this request. + region (str): + Name of the region scoping this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + """ + + health_check_service: str = proto.Field( proto.STRING, - number=466984989, + number=408374747, ) - composite_health_check_resource: "CompositeHealthCheck" = proto.Field( + health_check_service_resource: "HealthCheckService" = proto.Field( proto.MESSAGE, - number=132195824, - message="CompositeHealthCheck", + number=477367794, + message="HealthCheckService", ) project: str = proto.Field( proto.STRING, @@ -104014,19 +108734,19 @@ class PatchRegionCompositeHealthCheckRequest(proto.Message): ) -class PatchRegionHealthAggregationPolicyRequest(proto.Message): - r"""A request message for RegionHealthAggregationPolicies.Patch. - See the method description for details. +class PatchRegionHealthSourceRequest(proto.Message): + r"""A request message for RegionHealthSources.Patch. See the + method description for details. .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - health_aggregation_policy (str): - Name of the HealthAggregationPolicy to - update. The name must be 1-63 characters long, - and comply with RFC1035. - health_aggregation_policy_resource (google.cloud.compute_v1.types.HealthAggregationPolicy): + health_source (str): + Name of the HealthSource to update. The name + must be 1-63 characters long, and comply with + RFC1035. + health_source_resource (google.cloud.compute_v1.types.HealthSource): The body resource for this request project (str): Project ID for this request. @@ -104056,14 +108776,14 @@ class PatchRegionHealthAggregationPolicyRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - health_aggregation_policy: str = proto.Field( + health_source: str = proto.Field( proto.STRING, - number=240314354, + number=376521566, ) - health_aggregation_policy_resource: "HealthAggregationPolicy" = proto.Field( + health_source_resource: "HealthSource" = proto.Field( proto.MESSAGE, - number=293526971, - message="HealthAggregationPolicy", + number=357735375, + message="HealthSource", ) project: str = proto.Field( proto.STRING, @@ -104080,17 +108800,17 @@ class PatchRegionHealthAggregationPolicyRequest(proto.Message): ) -class PatchRegionHealthCheckRequest(proto.Message): - r"""A request message for RegionHealthChecks.Patch. See the - method description for details. +class PatchRegionInstanceGroupManagerRequest(proto.Message): + r"""A request message for RegionInstanceGroupManagers.Patch. See + the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - health_check (str): - Name of the HealthCheck resource to patch. - health_check_resource (google.cloud.compute_v1.types.HealthCheck): + instance_group_manager (str): + The name of the instance group manager. 
+ instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): The body resource for this request project (str): Project ID for this request. @@ -104120,14 +108840,14 @@ class PatchRegionHealthCheckRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - health_check: str = proto.Field( + instance_group_manager: str = proto.Field( proto.STRING, - number=308876645, + number=249363395, ) - health_check_resource: "HealthCheck" = proto.Field( + instance_group_manager_resource: "InstanceGroupManager" = proto.Field( proto.MESSAGE, - number=201925032, - message="HealthCheck", + number=261063946, + message="InstanceGroupManager", ) project: str = proto.Field( proto.STRING, @@ -104144,19 +108864,17 @@ class PatchRegionHealthCheckRequest(proto.Message): ) -class PatchRegionHealthCheckServiceRequest(proto.Message): - r"""A request message for RegionHealthCheckServices.Patch. See - the method description for details. +class PatchRegionNetworkFirewallPolicyRequest(proto.Message): + r"""A request message for RegionNetworkFirewallPolicies.Patch. + See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - health_check_service (str): - Name of the HealthCheckService to update. The - name must be 1-63 characters long, and comply - with RFC1035. - health_check_service_resource (google.cloud.compute_v1.types.HealthCheckService): + firewall_policy (str): + Name of the firewall policy to update. + firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): The body resource for this request project (str): Project ID for this request. @@ -104186,14 +108904,14 @@ class PatchRegionHealthCheckServiceRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. 
""" - health_check_service: str = proto.Field( + firewall_policy: str = proto.Field( proto.STRING, - number=408374747, + number=498173265, ) - health_check_service_resource: "HealthCheckService" = proto.Field( + firewall_policy_resource: "FirewallPolicy" = proto.Field( proto.MESSAGE, - number=477367794, - message="HealthCheckService", + number=495049532, + message="FirewallPolicy", ) project: str = proto.Field( proto.STRING, @@ -104210,20 +108928,14 @@ class PatchRegionHealthCheckServiceRequest(proto.Message): ) -class PatchRegionHealthSourceRequest(proto.Message): - r"""A request message for RegionHealthSources.Patch. See the +class PatchRegionSecurityPolicyRequest(proto.Message): + r"""A request message for RegionSecurityPolicies.Patch. See the method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - health_source (str): - Name of the HealthSource to update. The name - must be 1-63 characters long, and comply with - RFC1035. - health_source_resource (google.cloud.compute_v1.types.HealthSource): - The body resource for this request project (str): Project ID for this request. region (str): @@ -104250,81 +108962,17 @@ class PatchRegionHealthSourceRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - """ - - health_source: str = proto.Field( - proto.STRING, - number=376521566, - ) - health_source_resource: "HealthSource" = proto.Field( - proto.MESSAGE, - number=357735375, - message="HealthSource", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, - ) - request_id: str = proto.Field( - proto.STRING, - number=37109963, - optional=True, - ) - - -class PatchRegionInstanceGroupManagerRequest(proto.Message): - r"""A request message for RegionInstanceGroupManagers.Patch. See - the method description for details. - - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - instance_group_manager (str): - The name of the instance group manager. - instance_group_manager_resource (google.cloud.compute_v1.types.InstanceGroupManager): + security_policy (str): + Name of the security policy to update. + security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): The body resource for this request - project (str): - Project ID for this request. - region (str): - Name of the region scoping this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be - a valid UUID with the exception that zero UUID - is not supported - (00000000-0000-0000-0000-000000000000). + update_mask (str): + Indicates fields to be cleared as part of + this request. - This field is a member of `oneof`_ ``_request_id``. + This field is a member of `oneof`_ ``_update_mask``. 
""" - instance_group_manager: str = proto.Field( - proto.STRING, - number=249363395, - ) - instance_group_manager_resource: "InstanceGroupManager" = proto.Field( - proto.MESSAGE, - number=261063946, - message="InstanceGroupManager", - ) project: str = proto.Field( proto.STRING, number=227560217, @@ -104338,74 +108986,24 @@ class PatchRegionInstanceGroupManagerRequest(proto.Message): number=37109963, optional=True, ) - - -class PatchRegionNetworkFirewallPolicyRequest(proto.Message): - r"""A request message for RegionNetworkFirewallPolicies.Patch. - See the method description for details. - - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - firewall_policy (str): - Name of the firewall policy to update. - firewall_policy_resource (google.cloud.compute_v1.types.FirewallPolicy): - The body resource for this request - project (str): - Project ID for this request. - region (str): - Name of the region scoping this request. - request_id (str): - An optional request ID to identify requests. - Specify a unique request ID so that if you must - retry your request, the server will know to - ignore the request if it has already been - completed. - - For example, consider a situation where you make - an initial request and the request times out. If - you make the request again with the same request - ID, the server can check if original operation - with the same request ID was received, and if - so, will ignore the second request. This - prevents clients from accidentally creating - duplicate commitments. - - The request ID must be - a valid UUID with the exception that zero UUID - is not supported - (00000000-0000-0000-0000-000000000000). - - This field is a member of `oneof`_ ``_request_id``. 
- """ - - firewall_policy: str = proto.Field( + security_policy: str = proto.Field( proto.STRING, - number=498173265, + number=171082513, ) - firewall_policy_resource: "FirewallPolicy" = proto.Field( + security_policy_resource: "SecurityPolicy" = proto.Field( proto.MESSAGE, - number=495049532, - message="FirewallPolicy", - ) - project: str = proto.Field( - proto.STRING, - number=227560217, - ) - region: str = proto.Field( - proto.STRING, - number=138946292, + number=216159612, + message="SecurityPolicy", ) - request_id: str = proto.Field( + update_mask: str = proto.Field( proto.STRING, - number=37109963, + number=500079778, optional=True, ) -class PatchRegionSecurityPolicyRequest(proto.Message): - r"""A request message for RegionSecurityPolicies.Patch. See the +class PatchRegionSnapshotSettingRequest(proto.Message): + r"""A request message for RegionSnapshotSettings.Patch. See the method description for details. @@ -104415,7 +109013,7 @@ class PatchRegionSecurityPolicyRequest(proto.Message): project (str): Project ID for this request. region (str): - Name of the region scoping this request. + Name of the region for this request. request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -104438,13 +109036,11 @@ class PatchRegionSecurityPolicyRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - security_policy (str): - Name of the security policy to update. - security_policy_resource (google.cloud.compute_v1.types.SecurityPolicy): + snapshot_settings_resource (google.cloud.compute_v1.types.SnapshotSettings): The body resource for this request update_mask (str): - Indicates fields to be cleared as part of - this request. + update_mask indicates fields to be updated as part of this + request. This field is a member of `oneof`_ ``_update_mask``. 
""" @@ -104462,14 +109058,10 @@ class PatchRegionSecurityPolicyRequest(proto.Message): number=37109963, optional=True, ) - security_policy: str = proto.Field( - proto.STRING, - number=171082513, - ) - security_policy_resource: "SecurityPolicy" = proto.Field( + snapshot_settings_resource: "SnapshotSettings" = proto.Field( proto.MESSAGE, - number=216159612, - message="SecurityPolicy", + number=357664495, + message="SnapshotSettings", ) update_mask: str = proto.Field( proto.STRING, @@ -110061,14 +114653,16 @@ class RawDisk(proto.Message): This field is a member of `oneof`_ ``_sha1_checksum``. source (str): - The full Google Cloud Storage URL where the raw disk image - archive is stored. The following are valid formats for the - URL: + The full Google Cloud Storage URL or Artifact Registry path + where the raw disk image archive is stored. The following + are valid formats: :: - https://storage.googleapis.com/bucket_name/image_archive_name - https://storage.googleapis.com/bucket_name/folder_name/image_archive_name + - projects/project/locations/location/repositories/repo/packages/package/versions/version_id + - projects/project/locations/location/repositories/repo/packages/package/versions/version_id@dirsum_sha256:hex_value In order to create an image, you must provide the full or partial URL of one of the following: @@ -110637,6 +115231,35 @@ def raw_page(self): ) +class RegionDiskUpdateKmsKeyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + Optional. The new KMS key to replace the current one on the + disk. If empty, the disk will be re-encrypted using the + primary version of the disk's current KMS key. + + The KMS key can be provided in the following formats: + + :: + + - projects/project_id/locations/location/keyRings/key_ring/cryptoKeys/key + + Where project is the project ID or project number. 
+ + This field is a member of `oneof`_ ``_kms_key_name``. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + + class RegionDisksAddResourcePoliciesRequest(proto.Message): r""" @@ -110909,6 +115532,97 @@ class RegionInstanceGroupManagerPatchInstanceConfigReq(proto.Message): ) +class RegionInstanceGroupManagerResizeRequestsListResponse(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + + This field is a member of `oneof`_ ``_etag``. + id (str): + Output only. [Output Only] Unique identifier for the + resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest]): + A list of Resize Request resources. + kind (str): + Output only. [Output Only] Type of the resource. + Alwayscompute#regionInstanceGroupManagerResizeRequestList + for a list of Resize Requests. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + Output only. [Output Only] This token allows you to get the + next page of results for list requests. If the number of + results is larger thanmaxResults, use the nextPageToken as a + value for the query parameter pageToken in the next list + request. Subsequent list requests will have their own + nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + Output only. [Output Only] Server-defined URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + Output only. [Output Only] Unreachable resources. + end_interface: MixerListResponseWithEtagBuilder + warning (google.cloud.compute_v1.types.Warning): + Output only. [Output Only] Informational warning message. + + This field is a member of `oneof`_ ``_warning``. 
+ """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["InstanceGroupManagerResizeRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="InstanceGroupManagerResizeRequest", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class RegionInstanceGroupManagerUpdateInstanceConfigReq(proto.Message): r"""RegionInstanceGroupManagers.updatePerInstanceConfigs @@ -111886,6 +116600,33 @@ class RegionSetPolicyRequest(proto.Message): ) +class RegionSnapshotUpdateKmsKeyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + Optional. The new KMS key to replace the current one on the + snapshot. If empty, the snapshot will be re-encrypted using + the primary version of the snapshot's current KMS key. + + The KMS key can be provided in the following formats: + + :: + + - projects/project_id/locations/region/keyRings/region/cryptoKeys/key + + This field is a member of `oneof`_ ``_kms_key_name``. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + + class RegionTargetHttpsProxiesSetSslCertificatesRequest(proto.Message): r""" @@ -113252,6 +117993,11 @@ class Reservation(proto.Message): tied to a commitment. This field is a member of `oneof`_ ``_commitment``. 
+ confidential_compute_type (str): + Check the ConfidentialComputeType enum for + the list of possible values. + + This field is a member of `oneof`_ ``_confidential_compute_type``. creation_timestamp (str): Output only. [Output Only] Creation timestamp inRFC3339 text format. @@ -113410,6 +118156,23 @@ class Reservation(proto.Message): This field is a member of `oneof`_ ``_zone``. """ + class ConfidentialComputeType(proto.Enum): + r""" + + Values: + UNDEFINED_CONFIDENTIAL_COMPUTE_TYPE (0): + A value indicating that the enum field is not + set. + CONFIDENTIAL_COMPUTE_TYPE_TDX (301241954): + Intel Trust Domain Extensions. + CONFIDENTIAL_COMPUTE_TYPE_UNSPECIFIED (42227601): + No description available. + """ + + UNDEFINED_CONFIDENTIAL_COMPUTE_TYPE = 0 + CONFIDENTIAL_COMPUTE_TYPE_TDX = 301241954 + CONFIDENTIAL_COMPUTE_TYPE_UNSPECIFIED = 42227601 + class DeploymentType(proto.Enum): r"""Specifies the deployment strategy for this reservation. @@ -113560,6 +118323,11 @@ class Status(proto.Enum): number=482134805, optional=True, ) + confidential_compute_type: str = proto.Field( + proto.STRING, + number=386447257, + optional=True, + ) creation_timestamp: str = proto.Field( proto.STRING, number=30525366, @@ -126509,6 +131277,40 @@ class SetIamPolicyInstanceTemplateRequest(proto.Message): ) +class SetIamPolicyInstantSnapshotGroupRequest(proto.Message): + r"""A request message for InstantSnapshotGroups.SetIamPolicy. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + zone (str): + The name of the zone for this request. 
+ zone_set_policy_request_resource (google.cloud.compute_v1.types.ZoneSetPolicyRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + zone_set_policy_request_resource: "ZoneSetPolicyRequest" = proto.Field( + proto.MESSAGE, + number=382082107, + message="ZoneSetPolicyRequest", + ) + + class SetIamPolicyInstantSnapshotRequest(proto.Message): r"""A request message for InstantSnapshots.SetIamPolicy. See the method description for details. @@ -126786,6 +131588,40 @@ class SetIamPolicyNodeTemplateRequest(proto.Message): ) +class SetIamPolicyRegionBackendBucketRequest(proto.Message): + r"""A request message for RegionBackendBuckets.SetIamPolicy. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: "RegionSetPolicyRequest" = proto.Field( + proto.MESSAGE, + number=276489091, + message="RegionSetPolicyRequest", + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + class SetIamPolicyRegionBackendServiceRequest(proto.Message): r"""A request message for RegionBackendServices.SetIamPolicy. See the method description for details. @@ -126854,6 +131690,41 @@ class SetIamPolicyRegionDiskRequest(proto.Message): ) +class SetIamPolicyRegionInstantSnapshotGroupRequest(proto.Message): + r"""A request message for + RegionInstantSnapshotGroups.SetIamPolicy. 
See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: "RegionSetPolicyRequest" = proto.Field( + proto.MESSAGE, + number=276489091, + message="RegionSetPolicyRequest", + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + class SetIamPolicyRegionInstantSnapshotRequest(proto.Message): r"""A request message for RegionInstantSnapshots.SetIamPolicy. See the method description for details. @@ -126923,6 +131794,40 @@ class SetIamPolicyRegionNetworkFirewallPolicyRequest(proto.Message): ) +class SetIamPolicyRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.SetIamPolicy. See the + method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + region_set_policy_request_resource (google.cloud.compute_v1.types.RegionSetPolicyRequest): + The body resource for this request + resource (str): + Name or id of the resource for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_policy_request_resource: "RegionSetPolicyRequest" = proto.Field( + proto.MESSAGE, + number=276489091, + message="RegionSetPolicyRequest", + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + class SetIamPolicyReservationBlockRequest(proto.Message): r"""A request message for ReservationBlocks.SetIamPolicy. 
See the method description for details. @@ -128078,6 +132983,70 @@ class SetLabelsRegionSecurityPolicyRequest(proto.Message): ) +class SetLabelsRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.SetLabels. See the + method description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + region (str): + The region for this request. + region_set_labels_request_resource (google.cloud.compute_v1.types.RegionSetLabelsRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + resource (str): + Name or id of the resource for this request. 
+ """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_set_labels_request_resource: "RegionSetLabelsRequest" = proto.Field( + proto.MESSAGE, + number=259357782, + message="RegionSetLabelsRequest", + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + + class SetLabelsSecurityPolicyRequest(proto.Message): r"""A request message for SecurityPolicies.SetLabels. See the method description for details. @@ -131075,6 +136044,11 @@ class Snapshot(proto.Message): request, but not persisted as part of resource payload. This field is a member of `oneof`_ ``_params``. + region (str): + Output only. [Output Only] URL of the region where the + snapshot resides. Only applicable for regional snapshots. + + This field is a member of `oneof`_ ``_region``. satisfies_pzi (bool): Output only. Reserved for future use. @@ -131109,6 +136083,18 @@ class Snapshot(proto.Message): snapshot later. This field is a member of `oneof`_ ``_snapshot_encryption_key``. + snapshot_group_id (str): + Output only. [Output Only] The unique ID of the snapshot + group that this snapshot belongs to. The usage of snapshot + group feature is restricted. + + This field is a member of `oneof`_ ``_snapshot_group_id``. + snapshot_group_name (str): + Output only. [Output only] The snapshot group that this + snapshot belongs to. The usage of snapshot group feature is + restricted. + + This field is a member of `oneof`_ ``_snapshot_group_name``. snapshot_type (str): Indicates the type of the snapshot. 
Check the SnapshotType enum for the list of @@ -131390,6 +136376,11 @@ class StorageBytesStatus(proto.Enum): optional=True, message="SnapshotParams", ) + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) satisfies_pzi: bool = proto.Field( proto.BOOL, number=480964257, @@ -131411,6 +136402,16 @@ class StorageBytesStatus(proto.Enum): optional=True, message="CustomerEncryptionKey", ) + snapshot_group_id: str = proto.Field( + proto.STRING, + number=255004406, + optional=True, + ) + snapshot_group_name: str = proto.Field( + proto.STRING, + number=246247846, + optional=True, + ) snapshot_type: str = proto.Field( proto.STRING, number=124349653, @@ -131484,6 +136485,56 @@ class StorageBytesStatus(proto.Enum): ) +class SnapshotGroupParameters(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + replica_zones (MutableSequence[str]): + URLs of the zones where disks should be + replicated to. Only applicable for regional + resources. + source_snapshot_group (str): + The source snapshot group used to create + disks. You can provide this as a partial or full + URL to the resource. For example, the following + are valid values: + + + - + https://www.googleapis.com/compute/v1/projects/project/global/snapshotGroups/snapshotGroup + - + projects/project/global/snapshotGroups/snapshotGroup + - global/snapshotGroups/snapshotGroup + + This field is a member of `oneof`_ ``_source_snapshot_group``. + type_ (str): + URL of the disk type resource describing + which disk type to use to create disks. Provide + this when creating the disk. For + example:projects/project/zones/zone/diskTypes/pd-ssd. + See Persistent disk types. + + This field is a member of `oneof`_ ``_type``. 
+ """ + + replica_zones: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=48438272, + ) + source_snapshot_group: str = proto.Field( + proto.STRING, + number=225130728, + optional=True, + ) + type_: str = proto.Field( + proto.STRING, + number=3575610, + optional=True, + ) + + class SnapshotList(proto.Message): r"""Contains a list of Snapshot resources. @@ -131586,6 +136637,11 @@ class SnapshotSettings(proto.Message): .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + access_location (google.cloud.compute_v1.types.SnapshotSettingsAccessLocation): + (Regional snapshots use only)Policy of which + location is allowed to access snapshot. + + This field is a member of `oneof`_ ``_access_location``. storage_location (google.cloud.compute_v1.types.SnapshotSettingsStorageLocationSettings): Policy of which storage location is going to be resolved, and additional data that @@ -131595,6 +136651,12 @@ class SnapshotSettings(proto.Message): This field is a member of `oneof`_ ``_storage_location``. """ + access_location: "SnapshotSettingsAccessLocation" = proto.Field( + proto.MESSAGE, + number=18410544, + optional=True, + message="SnapshotSettingsAccessLocation", + ) storage_location: "SnapshotSettingsStorageLocationSettings" = proto.Field( proto.MESSAGE, number=460859641, @@ -131603,6 +136665,79 @@ class SnapshotSettings(proto.Message): ) +class SnapshotSettingsAccessLocation(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + locations (MutableMapping[str, google.cloud.compute_v1.types.SnapshotSettingsAccessLocationAccessLocationPreference]): + List of regions that can restore a regional + snapshot from the current region + policy (str): + Policy of which location is allowed to access + snapshot. Check the Policy enum for the list of + possible values. 
+ + This field is a member of `oneof`_ ``_policy``. + """ + + class Policy(proto.Enum): + r"""Policy of which location is allowed to access snapshot. + + Values: + UNDEFINED_POLICY (0): + A value indicating that the enum field is not + set. + ALL_REGIONS (273143585): + Any regions will be able to access the source + location. + POLICY_UNSPECIFIED (197974922): + No description available. + SPECIFIC_REGIONS (454128082): + Only allowlisted regions will be able to + restore region scoped snapshots + """ + + UNDEFINED_POLICY = 0 + ALL_REGIONS = 273143585 + POLICY_UNSPECIFIED = 197974922 + SPECIFIC_REGIONS = 454128082 + + locations: MutableMapping[ + str, "SnapshotSettingsAccessLocationAccessLocationPreference" + ] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=413423454, + message="SnapshotSettingsAccessLocationAccessLocationPreference", + ) + policy: str = proto.Field( + proto.STRING, + number=91071794, + optional=True, + ) + + +class SnapshotSettingsAccessLocationAccessLocationPreference(proto.Message): + r"""A structure for specifying an allowed target region. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + region (str): + Accessible region name + + This field is a member of `oneof`_ ``_region``. + """ + + region: str = proto.Field( + proto.STRING, + number=138946292, + optional=True, + ) + + class SnapshotSettingsStorageLocationSettings(proto.Message): r""" @@ -131688,6 +136823,33 @@ class SnapshotSettingsStorageLocationSettingsStorageLocationPreference(proto.Mes ) +class SnapshotUpdateKmsKeyRequest(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + kms_key_name (str): + Optional. The new KMS key to replace the current one on the + snapshot. If empty, the snapshot will be re-encrypted using + the primary version of the snapshot's current KMS key. 
+ + The KMS key can be provided in the following formats: + + :: + + - projects/project_id/locations/region/keyRings/key_ring/cryptoKeys/key + + This field is a member of `oneof`_ ``_kms_key_name``. + """ + + kms_key_name: str = proto.Field( + proto.STRING, + number=484373913, + optional=True, + ) + + class SourceDiskEncryptionKey(proto.Message): r""" @@ -141019,6 +146181,41 @@ class TestIamPermissionsInstanceTemplateRequest(proto.Message): ) +class TestIamPermissionsInstantSnapshotGroupRequest(proto.Message): + r"""A request message for + InstantSnapshotGroups.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + zone (str): + The name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class TestIamPermissionsInstantSnapshotRequest(proto.Message): r"""A request message for InstantSnapshots.TestIamPermissions. See the method description for details. @@ -141428,6 +146625,41 @@ class TestIamPermissionsRegionAutoscalerRequest(proto.Message): ) +class TestIamPermissionsRegionBackendBucketRequest(proto.Message): + r"""A request message for + RegionBackendBuckets.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. 
+ test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + class TestIamPermissionsRegionBackendServiceRequest(proto.Message): r"""A request message for RegionBackendServices.TestIamPermissions. See the method @@ -141705,6 +146937,41 @@ class TestIamPermissionsRegionInstanceGroupRequest(proto.Message): ) +class TestIamPermissionsRegionInstantSnapshotGroupRequest(proto.Message): + r"""A request message for + RegionInstantSnapshotGroups.TestIamPermissions. See the method + description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + class TestIamPermissionsRegionInstantSnapshotRequest(proto.Message): r"""A request message for RegionInstantSnapshots.TestIamPermissions. 
See the method @@ -141810,6 +147077,40 @@ class TestIamPermissionsRegionNotificationEndpointRequest(proto.Message): ) +class TestIamPermissionsRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.TestIamPermissions. See + the method description for details. + + Attributes: + project (str): + Project ID for this request. + region (str): + The name of the region for this request. + resource (str): + Name or id of the resource for this request. + test_permissions_request_resource (google.cloud.compute_v1.types.TestPermissionsRequest): + The body resource for this request + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + resource: str = proto.Field( + proto.STRING, + number=195806222, + ) + test_permissions_request_resource: "TestPermissionsRequest" = proto.Field( + proto.MESSAGE, + number=439214758, + message="TestPermissionsRequest", + ) + + class TestIamPermissionsReservationBlockRequest(proto.Message): r"""A request message for ReservationBlocks.TestIamPermissions. See the method description for details. @@ -143140,14 +148441,212 @@ class UpdateHealthCheckRequest(proto.Message): This field is a member of `oneof`_ ``_request_id``. """ - health_check: str = proto.Field( + health_check: str = proto.Field( + proto.STRING, + number=308876645, + ) + health_check_resource: "HealthCheck" = proto.Field( + proto.MESSAGE, + number=201925032, + message="HealthCheck", + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + + +class UpdateInstanceRequest(proto.Message): + r"""A request message for Instances.Update. See the method + description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + discard_local_ssd (bool): + Whether to discard local SSDs from the + instance during restart default value is false. + + This field is a member of `oneof`_ ``_discard_local_ssd``. + instance (str): + Name of the instance resource to update. + instance_resource (google.cloud.compute_v1.types.Instance): + The body resource for this request + minimal_action (str): + Specifies the action to take when updating an + instance even if the updated properties do not + require it. If not specified, then Compute + Engine acts based on the minimum action that the + updated properties require. + Check the MinimalAction enum for the list of + possible values. + + This field is a member of `oneof`_ ``_minimal_action``. + most_disruptive_allowed_action (str): + Specifies the most disruptive action that can be taken on + the instance as part of the update. Compute Engine returns + an error if the instance properties require a more + disruptive action as part of the instance update. Valid + options from lowest to highest are NO_EFFECT, REFRESH, and + RESTART. Check the MostDisruptiveAllowedAction enum for the + list of possible values. + + This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + class MinimalAction(proto.Enum): + r"""Specifies the action to take when updating an instance even if the + updated properties do not require it. If not specified, then Compute + Engine acts based on the minimum action that the updated properties + require. Additional supported values which may be not listed in the + enum directly due to technical reasons: INVALID NO_EFFECT REFRESH + RESTART + + Values: + UNDEFINED_MINIMAL_ACTION (0): + A value indicating that the enum field is not + set. + """ + + UNDEFINED_MINIMAL_ACTION = 0 + + class MostDisruptiveAllowedAction(proto.Enum): + r"""Specifies the most disruptive action that can be taken on the + instance as part of the update. Compute Engine returns an error if + the instance properties require a more disruptive action as part of + the instance update. Valid options from lowest to highest are + NO_EFFECT, REFRESH, and RESTART. Additional supported values which + may be not listed in the enum directly due to technical reasons: + INVALID NO_EFFECT REFRESH RESTART + + Values: + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION (0): + A value indicating that the enum field is not + set. 
+ """ + + UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION = 0 + + discard_local_ssd: bool = proto.Field( + proto.BOOL, + number=319517903, + optional=True, + ) + instance: str = proto.Field( + proto.STRING, + number=18257045, + ) + instance_resource: "Instance" = proto.Field( + proto.MESSAGE, + number=215988344, + message="Instance", + ) + minimal_action: str = proto.Field( + proto.STRING, + number=270567060, + optional=True, + ) + most_disruptive_allowed_action: str = proto.Field( + proto.STRING, + number=66103053, + optional=True, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + +class UpdateKmsKeyDiskRequest(proto.Message): + r"""A request message for Disks.UpdateKmsKey. See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + disk (str): + Name of the Disk resource, should conform to + RFC1035. + disk_update_kms_key_request_resource (google.cloud.compute_v1.types.DiskUpdateKmsKeyRequest): + The body resource for this request + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. 
+ + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + zone (str): + The name of the zone for this request. + """ + + disk: str = proto.Field( proto.STRING, - number=308876645, + number=3083677, ) - health_check_resource: "HealthCheck" = proto.Field( + disk_update_kms_key_request_resource: "DiskUpdateKmsKeyRequest" = proto.Field( proto.MESSAGE, - number=201925032, - message="HealthCheck", + number=178025952, + message="DiskUpdateKmsKeyRequest", ) project: str = proto.Field( proto.STRING, @@ -143158,47 +148657,29 @@ class UpdateHealthCheckRequest(proto.Message): number=37109963, optional=True, ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) -class UpdateInstanceRequest(proto.Message): - r"""A request message for Instances.Update. See the method - description for details. +class UpdateKmsKeyRegionDiskRequest(proto.Message): + r"""A request message for RegionDisks.UpdateKmsKey. See the + method description for details. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: - discard_local_ssd (bool): - Whether to discard local SSDs from the - instance during restart default value is false. - - This field is a member of `oneof`_ ``_discard_local_ssd``. - instance (str): - Name of the instance resource to update. - instance_resource (google.cloud.compute_v1.types.Instance): - The body resource for this request - minimal_action (str): - Specifies the action to take when updating an - instance even if the updated properties do not - require it. If not specified, then Compute - Engine acts based on the minimum action that the - updated properties require. - Check the MinimalAction enum for the list of - possible values. - - This field is a member of `oneof`_ ``_minimal_action``. 
- most_disruptive_allowed_action (str): - Specifies the most disruptive action that can be taken on - the instance as part of the update. Compute Engine returns - an error if the instance properties require a more - disruptive action as part of the instance update. Valid - options from lowest to highest are NO_EFFECT, REFRESH, and - RESTART. Check the MostDisruptiveAllowedAction enum for the - list of possible values. - - This field is a member of `oneof`_ ``_most_disruptive_allowed_action``. + disk (str): + Name of the Disk resource, should conform to + RFC1035. project (str): Project ID for this request. + region (str): + The name of the region for this request. + region_disk_update_kms_key_request_resource (google.cloud.compute_v1.types.RegionDiskUpdateKmsKeyRequest): + The body resource for this request request_id (str): An optional request ID to identify requests. Specify a unique request ID so that if you must @@ -143221,67 +148702,138 @@ class UpdateInstanceRequest(proto.Message): (00000000-0000-0000-0000-000000000000). This field is a member of `oneof`_ ``_request_id``. - zone (str): - The name of the zone for this request. """ - class MinimalAction(proto.Enum): - r"""Specifies the action to take when updating an instance even if the - updated properties do not require it. If not specified, then Compute - Engine acts based on the minimum action that the updated properties - require. 
Additional supported values which may be not listed in the - enum directly due to technical reasons: INVALID NO_EFFECT REFRESH - RESTART + disk: str = proto.Field( + proto.STRING, + number=3083677, + ) + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + region: str = proto.Field( + proto.STRING, + number=138946292, + ) + region_disk_update_kms_key_request_resource: "RegionDiskUpdateKmsKeyRequest" = ( + proto.Field( + proto.MESSAGE, + number=10015819, + message="RegionDiskUpdateKmsKeyRequest", + ) + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) - Values: - UNDEFINED_MINIMAL_ACTION (0): - A value indicating that the enum field is not - set. - """ - UNDEFINED_MINIMAL_ACTION = 0 +class UpdateKmsKeyRegionSnapshotRequest(proto.Message): + r"""A request message for RegionSnapshots.UpdateKmsKey. See the + method description for details. - class MostDisruptiveAllowedAction(proto.Enum): - r"""Specifies the most disruptive action that can be taken on the - instance as part of the update. Compute Engine returns an error if - the instance properties require a more disruptive action as part of - the instance update. Valid options from lowest to highest are - NO_EFFECT, REFRESH, and RESTART. Additional supported values which - may be not listed in the enum directly due to technical reasons: - INVALID NO_EFFECT REFRESH RESTART - Values: - UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION (0): - A value indicating that the enum field is not - set. - """ + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - UNDEFINED_MOST_DISRUPTIVE_ALLOWED_ACTION = 0 + Attributes: + project (str): + Project ID for this request. + region (str): + Name of the region for this request. 
+ region_snapshot_update_kms_key_request_resource (google.cloud.compute_v1.types.RegionSnapshotUpdateKmsKeyRequest): + The body resource for this request + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. - discard_local_ssd: bool = proto.Field( - proto.BOOL, - number=319517903, - optional=True, + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + snapshot (str): + Name of the snapshot resource to update. + Should conform to RFC1035. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, ) - instance: str = proto.Field( + region: str = proto.Field( proto.STRING, - number=18257045, + number=138946292, ) - instance_resource: "Instance" = proto.Field( + region_snapshot_update_kms_key_request_resource: "RegionSnapshotUpdateKmsKeyRequest" = proto.Field( proto.MESSAGE, - number=215988344, - message="Instance", + number=279384818, + message="RegionSnapshotUpdateKmsKeyRequest", ) - minimal_action: str = proto.Field( + request_id: str = proto.Field( proto.STRING, - number=270567060, + number=37109963, optional=True, ) - most_disruptive_allowed_action: str = proto.Field( + snapshot: str = proto.Field( proto.STRING, - number=66103053, - optional=True, + number=284874180, ) + + +class UpdateKmsKeySnapshotRequest(proto.Message): + r"""A request message for Snapshots.UpdateKmsKey. 
See the method + description for details. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + snapshot (str): + Name of the snapshot resource to update. + Should conform to RFC1035. + snapshot_update_kms_key_request_resource (google.cloud.compute_v1.types.SnapshotUpdateKmsKeyRequest): + The body resource for this request + """ + project: str = proto.Field( proto.STRING, number=227560217, @@ -143291,9 +148843,16 @@ class MostDisruptiveAllowedAction(proto.Enum): number=37109963, optional=True, ) - zone: str = proto.Field( + snapshot: str = proto.Field( proto.STRING, - number=3744684, + number=284874180, + ) + snapshot_update_kms_key_request_resource: "SnapshotUpdateKmsKeyRequest" = ( + proto.Field( + proto.MESSAGE, + number=103145991, + message="SnapshotUpdateKmsKeyRequest", + ) ) @@ -144559,6 +150118,71 @@ class UpdateUrlMapRequest(proto.Message): ) +class UpdateZoneVmExtensionPolicyRequest(proto.Message): + r"""A request message for ZoneVmExtensionPolicies.Update. See the + method description for details. + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + project (str): + Project ID for this request. + request_id (str): + An optional request ID to identify requests. + Specify a unique request ID so that if you must + retry your request, the server will know to + ignore the request if it has already been + completed. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be + a valid UUID with the exception that zero UUID + is not supported + (00000000-0000-0000-0000-000000000000). + + This field is a member of `oneof`_ ``_request_id``. + vm_extension_policy (str): + Name of the zone VM extension policy to + update. + vm_extension_policy_resource (google.cloud.compute_v1.types.VmExtensionPolicy): + The body resource for this request + zone (str): + Name of the zone for this request. + """ + + project: str = proto.Field( + proto.STRING, + number=227560217, + ) + request_id: str = proto.Field( + proto.STRING, + number=37109963, + optional=True, + ) + vm_extension_policy: str = proto.Field( + proto.STRING, + number=331532730, + ) + vm_extension_policy_resource: "VmExtensionPolicy" = proto.Field( + proto.MESSAGE, + number=515669235, + message="VmExtensionPolicy", + ) + zone: str = proto.Field( + proto.STRING, + number=3744684, + ) + + class UrlMap(proto.Message): r"""Represents a URL Map resource. @@ -146144,6 +151768,375 @@ def raw_page(self): ) +class VmExtensionPolicy(proto.Message): + r"""Represents a VM extension policy. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + creation_timestamp (str): + Output only. [Output Only] Creation timestamp inRFC3339 text + format. + + This field is a member of `oneof`_ ``_creation_timestamp``. + description (str): + An optional description of this resource. + + This field is a member of `oneof`_ ``_description``. + extension_policies (MutableMapping[str, google.cloud.compute_v1.types.VmExtensionPolicyExtensionPolicy]): + Required. A map of extension names (for + example, "ops-agent") to their corresponding + policy configurations. + global_resource_link (str): + Optional. Output only. [Output Only] Link to the global + policy that manages this zone policy, if applicable. + + This field is a member of `oneof`_ ``_global_resource_link``. + id (int): + Output only. [Output Only] The unique identifier for the + resource. This identifier is defined by the server. + + This field is a member of `oneof`_ ``_id``. + instance_selectors (MutableSequence[google.cloud.compute_v1.types.VmExtensionPolicyInstanceSelector]): + Optional. Selectors to target VMs for this policy. VMs are + selected if they match *any* of the provided selectors + (logical OR). If this list is empty, the policy applies to + all VMs. + kind (str): + Output only. [Output Only] Type of the resource. + Alwayscompute#vmExtensionPolicy. + + This field is a member of `oneof`_ ``_kind``. + managed_by_global (bool): + Optional. Output only. [Output Only] Indicates if this + policy is managed by a global policy. + + This field is a member of `oneof`_ ``_managed_by_global``. + name (str): + Name of the resource. Provided by the client when the + resource is created. The name must be 1-63 characters long, + and comply withRFC1035. 
Specifically, the name must be 1-63 + characters long and match the regular expression + ``[a-z]([-a-z0-9]*[a-z0-9])?`` which means the first + character must be a lowercase letter, and all following + characters must be a dash, lowercase letter, or digit, + except the last character, which cannot be a dash. + + This field is a member of `oneof`_ ``_name``. + priority (int): + Optional. Priority of this policy. Used to + resolve conflicts when multiple policies apply + to the same extension. The policy priority is an + integer from 0 to 65535, inclusive. Lower + integers indicate higher priorities. If you do + not specify a priority when creating a rule, it + is assigned a priority of 1000. If priorities + are equal, the policy with the most recent + creation timestamp takes precedence. + + This field is a member of `oneof`_ ``_priority``. + self_link (str): + Output only. [Output Only] Server-defined fully-qualified + URL for this resource. + + This field is a member of `oneof`_ ``_self_link``. + self_link_with_id (str): + Output only. [Output Only] Server-defined URL for this + resource's resource id. + + This field is a member of `oneof`_ ``_self_link_with_id``. + state (str): + Optional. Output only. [Output Only] Current state of the + policy: ACTIVE or DELETING. Check the State enum for the + list of possible values. + + This field is a member of `oneof`_ ``_state``. + update_timestamp (str): + Output only. [Output Only] Update timestamp inRFC3339 text + format. + + This field is a member of `oneof`_ ``_update_timestamp``. + """ + + class State(proto.Enum): + r"""Optional. Output only. [Output Only] Current state of the policy: + ACTIVE or DELETING. + + Values: + UNDEFINED_STATE (0): + A value indicating that the enum field is not + set. + ACTIVE (314733318): + The policy is active and applied to matching + VMs. Newly created VMs that match the policy + will also receive the extension policy. 
+ DELETING (528602024): + The policy is in the process of being + deleted. After the extension is removed from all + matching VMs, the policy will be deleted. + STATE_UNSPECIFIED (470755401): + Default value. Do not use. + """ + + UNDEFINED_STATE = 0 + ACTIVE = 314733318 + DELETING = 528602024 + STATE_UNSPECIFIED = 470755401 + + creation_timestamp: str = proto.Field( + proto.STRING, + number=30525366, + optional=True, + ) + description: str = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + extension_policies: MutableMapping[str, "VmExtensionPolicyExtensionPolicy"] = ( + proto.MapField( + proto.STRING, + proto.MESSAGE, + number=227761488, + message="VmExtensionPolicyExtensionPolicy", + ) + ) + global_resource_link: str = proto.Field( + proto.STRING, + number=516569583, + optional=True, + ) + id: int = proto.Field( + proto.UINT64, + number=3355, + optional=True, + ) + instance_selectors: MutableSequence["VmExtensionPolicyInstanceSelector"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=520298826, + message="VmExtensionPolicyInstanceSelector", + ) + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + managed_by_global: bool = proto.Field( + proto.BOOL, + number=88605675, + optional=True, + ) + name: str = proto.Field( + proto.STRING, + number=3373707, + optional=True, + ) + priority: int = proto.Field( + proto.INT32, + number=445151652, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + self_link_with_id: str = proto.Field( + proto.STRING, + number=44520962, + optional=True, + ) + state: str = proto.Field( + proto.STRING, + number=109757585, + optional=True, + ) + update_timestamp: str = proto.Field( + proto.STRING, + number=120894752, + optional=True, + ) + + +class VmExtensionPolicyExtensionPolicy(proto.Message): + r"""Configuration for a specific VM extension. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + pinned_version (str): + Optional. The specific version of the + extension to install. If not set, the latest + version is used. + + This field is a member of `oneof`_ ``_pinned_version``. + string_config (str): + Optional. String-based configuration data for + the extension. + + This field is a member of `oneof`_ ``_string_config``. + """ + + pinned_version: str = proto.Field( + proto.STRING, + number=446267249, + optional=True, + ) + string_config: str = proto.Field( + proto.STRING, + number=318905136, + optional=True, + ) + + +class VmExtensionPolicyInstanceSelector(proto.Message): + r"""Defines how to select VMs to apply a zone VM extension + policy. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + label_selector (google.cloud.compute_v1.types.VmExtensionPolicyLabelSelector): + Optional. LabelSelector selects VMs based on + their labels. + + This field is a member of `oneof`_ ``_label_selector``. + """ + + label_selector: "VmExtensionPolicyLabelSelector" = proto.Field( + proto.MESSAGE, + number=349780170, + optional=True, + message="VmExtensionPolicyLabelSelector", + ) + + +class VmExtensionPolicyLabelSelector(proto.Message): + r"""A LabelSelector is applied to a VM only if it matches all the + specified labels. + + Attributes: + inclusion_labels (MutableMapping[str, str]): + Optional. A map of key-value pairs representing VM labels. + VMs must have all of the labels specified in this map to be + selected (logical AND). + + e.g. If the ``inclusion_labels`` are {("key1", "value1"), + ("key2", "value2")}, the VM labels must contain both + ("key1", "value1") and ("key2", "value2") to be selected. If + the VM labels are ("key1", "value1") and ("something", + "else"), it will not be selected. + + If the map is empty, it's considered a match. 
+ """ + + inclusion_labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=501312642, + ) + + +class VmExtensionPolicyList(proto.Message): + r""" + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + etag (str): + Output only. [Output Only] Fingerprint of this resource. A + hash of the contents stored in this object. This field is + used in optimistic locking. This field will be ignored when + inserting a VmExtensionPolicy. An up-to-date fingerprint + must be provided in order to update the VmExtensionPolicy. + + To see the latest value of the fingerprint, make a get() + request to retrieve a VmExtensionPolicy. + + This field is a member of `oneof`_ ``_etag``. + id (str): + Output only. [Output Only] Unique identifier for the + resource; defined by the server. + + This field is a member of `oneof`_ ``_id``. + items (MutableSequence[google.cloud.compute_v1.types.VmExtensionPolicy]): + Output only. [Output Only] A list of VM extension policy + resources. + kind (str): + Output only. Type of resource. + + This field is a member of `oneof`_ ``_kind``. + next_page_token (str): + Output only. [Output Only] This token allows you to get the + next page of results for list requests. If the number of + results is larger thanmaxResults, use the nextPageToken as a + value for the query parameter pageToken in the next list + request. Subsequent list requests will have their own + nextPageToken to continue paging through the results. + + This field is a member of `oneof`_ ``_next_page_token``. + self_link (str): + Output only. [Output Only] Server-defined URL for this + resource. + + This field is a member of `oneof`_ ``_self_link``. + unreachables (MutableSequence[str]): + Output only. [Output Only] Unreachable resources. + warning (google.cloud.compute_v1.types.Warning): + Output only. [Output Only] Informational warning message. 
+ + This field is a member of `oneof`_ ``_warning``. + """ + + @property + def raw_page(self): + return self + + etag: str = proto.Field( + proto.STRING, + number=3123477, + optional=True, + ) + id: str = proto.Field( + proto.STRING, + number=3355, + optional=True, + ) + items: MutableSequence["VmExtensionPolicy"] = proto.RepeatedField( + proto.MESSAGE, + number=100526016, + message="VmExtensionPolicy", + ) + kind: str = proto.Field( + proto.STRING, + number=3292052, + optional=True, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=79797525, + optional=True, + ) + self_link: str = proto.Field( + proto.STRING, + number=456214797, + optional=True, + ) + unreachables: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=243372063, + ) + warning: "Warning" = proto.Field( + proto.MESSAGE, + number=50704284, + optional=True, + message="Warning", + ) + + class VpnGateway(proto.Message): r"""Represents a HA VPN gateway. diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_aggregated_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_aggregated_list_sync.py new file mode 100644 index 000000000000..c10e2ebe79e6 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_aggregated_list_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for AggregatedList +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_AggregatedList_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_aggregated_list(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.AggregatedListBackendBucketsRequest( + project="project_value", + ) + + # Make the request + page_result = client.aggregated_list(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END compute_v1_generated_BackendBuckets_AggregatedList_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_list_usable_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_list_usable_sync.py new file mode 100644 index 000000000000..3523c8c35af9 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_backend_buckets_list_usable_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_BackendBuckets_ListUsable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list_usable(): + # Create a client + client = compute_v1.BackendBucketsClient() + + # Initialize request argument(s) + request = compute_v1.ListUsableBackendBucketsRequest( + project="project_value", + ) + + # Make the request + page_result = client.list_usable(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END compute_v1_generated_BackendBuckets_ListUsable_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_disks_update_kms_key_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_disks_update_kms_key_sync.py new file mode 100644 index 000000000000..dd87322ceeff --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_disks_update_kms_key_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKmsKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Disks_UpdateKmsKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_kms_key(): + # Create a client + client = compute_v1.DisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeyDiskRequest( + disk="disk_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_Disks_UpdateKmsKey_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_delete_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_delete_sync.py new file mode 100644 index 000000000000..beae362ab29c --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_delete_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstantSnapshotGroups_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_InstantSnapshotGroups_Delete_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_get_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_get_iam_policy_sync.py new file mode 100644 index 000000000000..8fab2f5625a4 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_get_iam_policy_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstantSnapshotGroups_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyInstantSnapshotGroupRequest( + project="project_value", + resource="resource_value", + zone="zone_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_InstantSnapshotGroups_GetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_get_sync.py new file mode 100644 index 000000000000..9e765f9fe119 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_get_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstantSnapshotGroups_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.InstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_InstantSnapshotGroups_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_insert_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_insert_sync.py new file mode 100644 index 000000000000..00b96fb428c6 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_instant_snapshot_groups_insert_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstantSnapshotGroups_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_insert():
    """Insert an instant snapshot group and print the server response.

    Synchronous ``InstantSnapshotGroups.Insert`` call; the ``*_value``
    placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    groups_client = compute_v1.InstantSnapshotGroupsClient()

    # Only the destination project and zone are populated here.
    insert_request = compute_v1.InsertInstantSnapshotGroupRequest(
        project="project_value",
        zone="zone_value",
    )

    # Make the request and print whatever comes back.
    print(groups_client.insert(request=insert_request))
def sample_list():
    """List instant snapshot groups in a zone, printing each item.

    Synchronous ``InstantSnapshotGroups.List`` call; the ``*_value``
    placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    groups_client = compute_v1.InstantSnapshotGroupsClient()

    list_request = compute_v1.ListInstantSnapshotGroupsRequest(
        project="project_value",
        zone="zone_value",
    )

    # Iterating the returned pager yields individual list entries.
    for item in groups_client.list(request=list_request):
        print(item)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_InstantSnapshotGroups_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_set_iam_policy():
    """Set the IAM policy on an instant snapshot group and print the result.

    Synchronous ``InstantSnapshotGroups.SetIamPolicy`` call; the
    ``*_value`` placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    groups_client = compute_v1.InstantSnapshotGroupsClient()

    # Build the request inline and issue the call in one expression.
    result = groups_client.set_iam_policy(
        request=compute_v1.SetIamPolicyInstantSnapshotGroupRequest(
            project="project_value",
            resource="resource_value",
            zone="zone_value",
        )
    )

    # Handle the response
    print(result)
def sample_test_iam_permissions():
    """Check IAM permissions on an instant snapshot group and print the result.

    Synchronous ``InstantSnapshotGroups.TestIamPermissions`` call; the
    ``*_value`` placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    groups_client = compute_v1.InstantSnapshotGroupsClient()

    permissions_request = compute_v1.TestIamPermissionsInstantSnapshotGroupRequest(
        project="project_value",
        resource="resource_value",
        zone="zone_value",
    )

    # Make the request and print whatever comes back.
    print(groups_client.test_iam_permissions(request=permissions_request))
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendBuckets_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_delete():
    """Delete a regional backend bucket and print the server response.

    Synchronous ``RegionBackendBuckets.Delete`` call; the ``*_value``
    placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    # Build the request inline and issue the call in one expression.
    result = buckets_client.delete(
        request=compute_v1.DeleteRegionBackendBucketRequest(
            backend_bucket="backend_bucket_value",
            project="project_value",
            region="region_value",
        )
    )

    # Handle the response
    print(result)
def sample_get_iam_policy():
    """Read the IAM policy of a regional backend bucket and print it.

    Synchronous ``RegionBackendBuckets.GetIamPolicy`` call; the
    ``*_value`` placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    policy_request = compute_v1.GetIamPolicyRegionBackendBucketRequest(
        project="project_value",
        region="region_value",
        resource="resource_value",
    )

    # Make the request and print whatever comes back.
    print(buckets_client.get_iam_policy(request=policy_request))
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendBuckets_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_get():
    """Fetch one regional backend bucket and print the server response.

    Synchronous ``RegionBackendBuckets.Get`` call; the ``*_value``
    placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    # Build the request inline and issue the call in one expression.
    result = buckets_client.get(
        request=compute_v1.GetRegionBackendBucketRequest(
            backend_bucket="backend_bucket_value",
            project="project_value",
            region="region_value",
        )
    )

    # Handle the response
    print(result)
def sample_insert():
    """Insert a regional backend bucket and print the server response.

    Synchronous ``RegionBackendBuckets.Insert`` call; the ``*_value``
    placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    # Only the destination project and region are populated here.
    insert_request = compute_v1.InsertRegionBackendBucketRequest(
        project="project_value",
        region="region_value",
    )

    # Make the request and print whatever comes back.
    print(buckets_client.insert(request=insert_request))
def sample_list():
    """List regional backend buckets, printing each item.

    Synchronous ``RegionBackendBuckets.List`` call; the ``*_value``
    placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    list_request = compute_v1.ListRegionBackendBucketsRequest(
        project="project_value",
        region="region_value",
    )

    # Iterating the returned pager yields individual list entries.
    for item in buckets_client.list(request=list_request):
        print(item)
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListUsable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendBuckets_ListUsable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_list_usable():
    """List usable regional backend buckets, printing each item.

    Synchronous ``RegionBackendBuckets.ListUsable`` call; the ``*_value``
    placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    usable_request = compute_v1.ListUsableRegionBackendBucketsRequest(
        project="project_value",
        region="region_value",
    )

    # Iterating the returned pager yields individual list entries.
    for item in buckets_client.list_usable(request=usable_request):
        print(item)
def sample_patch():
    """Patch a regional backend bucket and print the server response.

    Synchronous ``RegionBackendBuckets.Patch`` call; the ``*_value``
    placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    # Build the request inline and issue the call in one expression.
    result = buckets_client.patch(
        request=compute_v1.PatchRegionBackendBucketRequest(
            backend_bucket="backend_bucket_value",
            project="project_value",
            region="region_value",
        )
    )

    # Handle the response
    print(result)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionBackendBuckets_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_set_iam_policy():
    """Set the IAM policy on a regional backend bucket and print the result.

    Synchronous ``RegionBackendBuckets.SetIamPolicy`` call; the
    ``*_value`` placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    policy_request = compute_v1.SetIamPolicyRegionBackendBucketRequest(
        project="project_value",
        region="region_value",
        resource="resource_value",
    )

    # Make the request and print whatever comes back.
    print(buckets_client.set_iam_policy(request=policy_request))
def sample_test_iam_permissions():
    """Check IAM permissions on a regional backend bucket and print the result.

    Synchronous ``RegionBackendBuckets.TestIamPermissions`` call; the
    ``*_value`` placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    buckets_client = compute_v1.RegionBackendBucketsClient()

    # Build the request inline and issue the call in one expression.
    result = buckets_client.test_iam_permissions(
        request=compute_v1.TestIamPermissionsRegionBackendBucketRequest(
            project="project_value",
            region="region_value",
            resource="resource_value",
        )
    )

    # Handle the response
    print(result)
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHealth +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionCompositeHealthChecks_GetHealth_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
def sample_get_health():
    """Query health for a regional composite health check and print the result.

    Synchronous ``RegionCompositeHealthChecks.GetHealth`` call; the
    ``*_value`` placeholder strings must be replaced with real identifiers.
    """
    # Create a client
    checks_client = compute_v1.RegionCompositeHealthChecksClient()

    health_request = compute_v1.GetHealthRegionCompositeHealthCheckRequest(
        composite_health_check="composite_health_check_value",
        project="project_value",
        region="region_value",
    )

    # Make the request and print whatever comes back.
    print(checks_client.get_health(request=health_request))
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionDisks_UpdateKmsKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_kms_key(): + # Create a client + client = compute_v1.RegionDisksClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeyRegionDiskRequest( + disk="disk_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionDisks_UpdateKmsKey_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_health_sources_get_health_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_health_sources_get_health_sync.py new file mode 100644 index 000000000000..94dc07c3723a --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_health_sources_get_health_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetHealth +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionHealthSources_GetHealth_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_health(): + # Create a client + client = compute_v1.RegionHealthSourcesClient() + + # Initialize request argument(s) + request = compute_v1.GetHealthRegionHealthSourceRequest( + health_source="health_source_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get_health(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionHealthSources_GetHealth_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_cancel_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_cancel_sync.py new file mode 100644 index 000000000000..beb016178de2 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_cancel_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Cancel +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Cancel_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_cancel(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.CancelRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + resize_request="resize_request_value", + ) + + # Make the request + response = client.cancel(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Cancel_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_delete_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_delete_sync.py new file mode 100644 index 000000000000..2e76e71da823 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_delete_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + resize_request="resize_request_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Delete_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_get_sync.py new file mode 100644 index 000000000000..d957c5765f3b --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_get_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + resize_request="resize_request_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_insert_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_insert_sync.py new file mode 100644 index 000000000000..f5fa54a0e5ed --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_insert_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstanceGroupManagerResizeRequestRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Insert_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_list_sync.py new file mode 100644 index 000000000000..013b3f0a364c --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instance_group_manager_resize_requests_list_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstanceGroupManagerResizeRequests_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionInstanceGroupManagerResizeRequestsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstanceGroupManagerResizeRequestsRequest( + instance_group_manager="instance_group_manager_value", + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END compute_v1_generated_RegionInstanceGroupManagerResizeRequests_List_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_delete_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_delete_sync.py new file mode 100644 index 000000000000..4a503c91b697 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_delete_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use 
this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstantSnapshotGroups_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstantSnapshotGroups_Delete_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_get_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_get_iam_policy_sync.py new file mode 100644 index 000000000000..fb65aafad1d7 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_get_iam_policy_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstantSnapshotGroups_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstantSnapshotGroups_GetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_get_sync.py new file mode 100644 index 000000000000..18e128e3a91b --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_get_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstantSnapshotGroups_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionInstantSnapshotGroupRequest( + instant_snapshot_group="instant_snapshot_group_value", + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstantSnapshotGroups_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_insert_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_insert_sync.py new file mode 100644 index 000000000000..cafcc0a0952a --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_insert_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstantSnapshotGroups_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstantSnapshotGroups_Insert_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_list_sync.py new file mode 100644 index 000000000000..d4a0cf6b373a --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_list_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstantSnapshotGroups_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionInstantSnapshotGroupsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END compute_v1_generated_RegionInstantSnapshotGroups_List_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_set_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_set_iam_policy_sync.py new file mode 100644 index 000000000000..a47fd4f321f7 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_set_iam_policy_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstantSnapshotGroups_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstantSnapshotGroups_SetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_test_iam_permissions_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_test_iam_permissions_sync.py new file mode 100644 index 000000000000..0c9fdd96cc74 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_instant_snapshot_groups_test_iam_permissions_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionInstantSnapshotGroups_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionInstantSnapshotGroupsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionInstantSnapshotGroupRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionInstantSnapshotGroups_TestIamPermissions_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshot_settings_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshot_settings_get_sync.py new file mode 100644 index 000000000000..0c83ab4b317f --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshot_settings_get_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshotSettings_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionSnapshotSettingsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSnapshotSettingRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshotSettings_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshot_settings_patch_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshot_settings_patch_sync.py new file mode 100644 index 000000000000..bcd65925fbfa --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshot_settings_patch_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Patch +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshotSettings_Patch_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_patch(): + # Create a client + client = compute_v1.RegionSnapshotSettingsClient() + + # Initialize request argument(s) + request = compute_v1.PatchRegionSnapshotSettingRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.patch(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshotSettings_Patch_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_delete_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_delete_sync.py new file mode 100644 index 000000000000..352ffd6473d7 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_delete_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.DeleteRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshots_Delete_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_get_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_get_iam_policy_sync.py new file mode 100644 index 000000000000..b366a597e04c --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_get_iam_policy_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_GetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get_iam_policy(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.GetIamPolicyRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.get_iam_policy(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshots_GetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_get_sync.py new file mode 100644 index 000000000000..7f4a3f44ed82 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_get_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.GetRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshots_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_insert_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_insert_sync.py new file mode 100644 index 000000000000..ac58cc9d5e2f --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_insert_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.InsertRegionSnapshotRequest( + project="project_value", + region="region_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshots_Insert_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_list_sync.py new file mode 100644 index 000000000000..378a97d2710d --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_list_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.ListRegionSnapshotsRequest( + project="project_value", + region="region_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END compute_v1_generated_RegionSnapshots_List_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_set_iam_policy_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_set_iam_policy_sync.py new file mode 100644 index 000000000000..6d40d52f86e9 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_set_iam_policy_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetIamPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_SetIamPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_iam_policy(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetIamPolicyRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_iam_policy(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshots_SetIamPolicy_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_set_labels_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_set_labels_sync.py new file mode 100644 index 000000000000..e25226d8908a --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_set_labels_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SetLabels +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_SetLabels_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_set_labels(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.SetLabelsRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.set_labels(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshots_SetLabels_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_test_iam_permissions_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_test_iam_permissions_sync.py new file mode 100644 index 000000000000..6793382f4240 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_test_iam_permissions_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for TestIamPermissions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_TestIamPermissions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_test_iam_permissions(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.TestIamPermissionsRegionSnapshotRequest( + project="project_value", + region="region_value", + resource="resource_value", + ) + + # Make the request + response = client.test_iam_permissions(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshots_TestIamPermissions_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_update_kms_key_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_update_kms_key_sync.py new file mode 100644 index 000000000000..b681ebf8d3f2 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_region_snapshots_update_kms_key_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKmsKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_RegionSnapshots_UpdateKmsKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_kms_key(): + # Create a client + client = compute_v1.RegionSnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeyRegionSnapshotRequest( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_RegionSnapshots_UpdateKmsKey_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshots_update_kms_key_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshots_update_kms_key_sync.py new file mode 100644 index 000000000000..21c672176218 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_snapshots_update_kms_key_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateKmsKey +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_Snapshots_UpdateKmsKey_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update_kms_key(): + # Create a client + client = compute_v1.SnapshotsClient() + + # Initialize request argument(s) + request = compute_v1.UpdateKmsKeySnapshotRequest( + project="project_value", + snapshot="snapshot_value", + ) + + # Make the request + response = client.update_kms_key(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_Snapshots_UpdateKmsKey_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_delete_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_delete_sync.py new file mode 100644 index 000000000000..32ff232d927a --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_delete_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Delete +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneVmExtensionPolicies_Delete_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_delete(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.DeleteZoneVmExtensionPolicyRequest( + project="project_value", + vm_extension_policy="vm_extension_policy_value", + zone="zone_value", + ) + + # Make the request + response = client.delete(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_ZoneVmExtensionPolicies_Delete_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_get_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_get_sync.py new file mode 100644 index 000000000000..c167c18f16c9 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_get_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Get +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneVmExtensionPolicies_Get_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_get(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.GetZoneVmExtensionPolicyRequest( + project="project_value", + vm_extension_policy="vm_extension_policy_value", + zone="zone_value", + ) + + # Make the request + response = client.get(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_ZoneVmExtensionPolicies_Get_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_insert_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_insert_sync.py new file mode 100644 index 000000000000..41d52634e8ec --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_insert_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Insert +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneVmExtensionPolicies_Insert_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_insert(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.InsertZoneVmExtensionPolicyRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + response = client.insert(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_ZoneVmExtensionPolicies_Insert_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_list_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_list_sync.py new file mode 100644 index 000000000000..df7140bec392 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_list_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for List +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneVmExtensionPolicies_List_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_list(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.ListZoneVmExtensionPoliciesRequest( + project="project_value", + zone="zone_value", + ) + + # Make the request + page_result = client.list(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END compute_v1_generated_ZoneVmExtensionPolicies_List_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_update_sync.py b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_update_sync.py new file mode 100644 index 000000000000..25b6bce9a925 --- /dev/null +++ b/packages/google-cloud-compute/samples/generated_samples/compute_v1_generated_zone_vm_extension_policies_update_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for Update +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-compute + + +# [START compute_v1_generated_ZoneVmExtensionPolicies_Update_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import compute_v1 + + +def sample_update(): + # Create a client + client = compute_v1.ZoneVmExtensionPoliciesClient() + + # Initialize request argument(s) + request = compute_v1.UpdateZoneVmExtensionPolicyRequest( + project="project_value", + vm_extension_policy="vm_extension_policy_value", + zone="zone_value", + ) + + # Make the request + response = client.update(request=request) + + # Handle the response + print(response) + + +# [END compute_v1_generated_ZoneVmExtensionPolicies_Update_sync] diff --git a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json index be04178be060..18a681316182 100644 --- a/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json +++ b/packages/google-cloud-compute/samples/generated_samples/snippet_metadata_google.cloud.compute.v1.json @@ -1839,6 +1839,86 @@ ], "title": "compute_v1_generated_backend_buckets_add_signed_url_key_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.aggregated_list", + "method": { + "fullName": 
"google.cloud.compute.v1.BackendBuckets.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListBackendBucketsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.backend_buckets.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_backend_buckets_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_aggregated_list_sync.py" + }, { "canonical": true, "clientMethod": { @@ -2263,6 +2343,86 @@ ], "title": "compute_v1_generated_backend_buckets_insert_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.BackendBucketsClient", + "shortName": "BackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.BackendBucketsClient.list_usable", + "method": { + "fullName": "google.cloud.compute.v1.BackendBuckets.ListUsable", + "service": { + "fullName": "google.cloud.compute.v1.BackendBuckets", + "shortName": "BackendBuckets" + 
}, + "shortName": "ListUsable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListUsableBackendBucketsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.backend_buckets.pagers.ListUsablePager", + "shortName": "list_usable" + }, + "description": "Sample for ListUsable", + "file": "compute_v1_generated_backend_buckets_list_usable_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_BackendBuckets_ListUsable_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_backend_buckets_list_usable_sync.py" + }, { "canonical": true, "clientMethod": { @@ -6511,6 +6671,98 @@ ], "title": "compute_v1_generated_disks_test_iam_permissions_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.DisksClient", + "shortName": "DisksClient" + }, + "fullName": "google.cloud.compute_v1.DisksClient.update_kms_key", + "method": { + "fullName": "google.cloud.compute.v1.Disks.UpdateKmsKey", + "service": { + "fullName": "google.cloud.compute.v1.Disks", + "shortName": "Disks" + }, + "shortName": "UpdateKmsKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateKmsKeyDiskRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, 
+ { + "name": "disk", + "type": "str" + }, + { + "name": "disk_update_kms_key_request_resource", + "type": "google.cloud.compute_v1.types.DiskUpdateKmsKeyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_kms_key" + }, + "description": "Sample for UpdateKmsKey", + "file": "compute_v1_generated_disks_update_kms_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Disks_UpdateKmsKey_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_disks_update_kms_key_sync.py" + }, { "canonical": true, "clientMethod": { @@ -24011,27 +24263,35 @@ "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", - "shortName": "InstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient", + "shortName": "InstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.aggregated_list", + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.AggregatedList", + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups.Delete", "service": { - "fullName": "google.cloud.compute.v1.InstantSnapshots", - "shortName": "InstantSnapshots" + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups", + 
"shortName": "InstantSnapshotGroups" }, - "shortName": "AggregatedList" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListInstantSnapshotsRequest" + "type": "google.cloud.compute_v1.types.DeleteInstantSnapshotGroupRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "instant_snapshot_group", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -24045,22 +24305,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.instant_snapshots.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_instant_snapshots_aggregated_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_instant_snapshot_groups_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_AggregatedList_sync", + "regionTag": "compute_v1_generated_InstantSnapshotGroups_Delete_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -24070,43 +24330,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_aggregated_list_sync.py" + "title": "compute_v1_generated_instant_snapshot_groups_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", - "shortName": "InstantSnapshotsClient" + 
"fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient", + "shortName": "InstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.delete", + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.Delete", + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups.GetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.InstantSnapshots", - "shortName": "InstantSnapshots" + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups", + "shortName": "InstantSnapshotGroups" }, - "shortName": "Delete" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyInstantSnapshotGroupRequest" }, { "name": "project", @@ -24117,7 +24377,7 @@ "type": "str" }, { - "name": "instant_snapshot", + "name": "resource", "type": "str" }, { @@ -24133,14 +24393,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_instant_snapshots_delete_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_instant_snapshot_groups_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_Delete_sync", + "regionTag": "compute_v1_generated_InstantSnapshotGroups_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -24173,28 +24433,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_delete_sync.py" + "title": "compute_v1_generated_instant_snapshot_groups_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": 
{ - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", - "shortName": "InstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient", + "shortName": "InstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.GetIamPolicy", + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups.Get", "service": { - "fullName": "google.cloud.compute.v1.InstantSnapshots", - "shortName": "InstantSnapshots" + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups", + "shortName": "InstantSnapshotGroups" }, - "shortName": "GetIamPolicy" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.GetInstantSnapshotGroupRequest" }, { "name": "project", @@ -24205,7 +24465,7 @@ "type": "str" }, { - "name": "resource", + "name": "instant_snapshot_group", "type": "str" }, { @@ -24221,14 +24481,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.cloud.compute_v1.types.InstantSnapshotGroup", + "shortName": "get" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_instant_snapshots_get_iam_policy_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_instant_snapshot_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_InstantSnapshotGroups_Get_sync", "segments": [ { "end": 53, @@ -24261,28 +24521,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_get_iam_policy_sync.py" + "title": 
"compute_v1_generated_instant_snapshot_groups_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", - "shortName": "InstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient", + "shortName": "InstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.get", + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.Get", + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups.Insert", "service": { - "fullName": "google.cloud.compute.v1.InstantSnapshots", - "shortName": "InstantSnapshots" + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups", + "shortName": "InstantSnapshotGroups" }, - "shortName": "Get" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.InsertInstantSnapshotGroupRequest" }, { "name": "project", @@ -24293,8 +24553,8 @@ "type": "str" }, { - "name": "instant_snapshot", - "type": "str" + "name": "instant_snapshot_group_resource", + "type": "google.cloud.compute_v1.types.InstantSnapshotGroup" }, { "name": "retry", @@ -24309,22 +24569,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InstantSnapshot", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for Get", - "file": "compute_v1_generated_instant_snapshots_get_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_instant_snapshot_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_Get_sync", + "regionTag": "compute_v1_generated_InstantSnapshotGroups_Insert_sync", "segments": [ { - "end": 53, + 
"end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -24334,43 +24594,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_get_sync.py" + "title": "compute_v1_generated_instant_snapshot_groups_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", - "shortName": "InstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient", + "shortName": "InstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.insert", + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.Insert", + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups.List", "service": { - "fullName": "google.cloud.compute.v1.InstantSnapshots", - "shortName": "InstantSnapshots" + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups", + "shortName": "InstantSnapshotGroups" }, - "shortName": "Insert" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.ListInstantSnapshotGroupsRequest" }, { "name": "project", @@ -24380,10 +24640,6 @@ "name": "zone", "type": "str" }, - { - "name": "instant_snapshot_resource", - "type": "google.cloud.compute_v1.types.InstantSnapshot" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -24397,22 +24653,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + 
"resultType": "google.cloud.compute_v1.services.instant_snapshot_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_instant_snapshots_insert_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_instant_snapshot_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_Insert_sync", + "regionTag": "compute_v1_generated_InstantSnapshotGroups_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -24432,33 +24688,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_insert_sync.py" + "title": "compute_v1_generated_instant_snapshot_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", - "shortName": "InstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient", + "shortName": "InstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.list", + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.List", + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.InstantSnapshots", - "shortName": "InstantSnapshots" + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups", + "shortName": "InstantSnapshotGroups" }, - "shortName": "List" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInstantSnapshotsRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyInstantSnapshotGroupRequest" }, { "name": "project", @@ -24468,6 +24724,14 
@@ "name": "zone", "type": "str" }, + { + "name": "resource", + "type": "str" + }, + { + "name": "zone_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -24481,14 +24745,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.instant_snapshots.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for List", - "file": "compute_v1_generated_instant_snapshots_list_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_instant_snapshot_groups_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_List_sync", + "regionTag": "compute_v1_generated_InstantSnapshotGroups_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -24506,43 +24770,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_list_sync.py" + "title": "compute_v1_generated_instant_snapshot_groups_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", - "shortName": "InstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient", + "shortName": "InstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.InstantSnapshotGroupsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.SetIamPolicy", + "fullName": 
"google.cloud.compute.v1.InstantSnapshotGroups.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.InstantSnapshots", - "shortName": "InstantSnapshots" + "fullName": "google.cloud.compute.v1.InstantSnapshotGroups", + "shortName": "InstantSnapshotGroups" }, - "shortName": "SetIamPolicy" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsInstantSnapshotGroupRequest" }, { "name": "project", @@ -24557,8 +24821,8 @@ "type": "str" }, { - "name": "zone_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -24573,14 +24837,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_instant_snapshots_set_iam_policy_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_instant_snapshot_groups_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_InstantSnapshotGroups_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -24613,7 +24877,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_set_iam_policy_sync.py" + "title": "compute_v1_generated_instant_snapshot_groups_test_iam_permissions_sync.py" }, { "canonical": true, @@ -24622,35 +24886,111 @@ "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", "shortName": 
"InstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.set_labels", + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.SetLabels", + "fullName": "google.cloud.compute.v1.InstantSnapshots.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.InstantSnapshots", "shortName": "InstantSnapshots" }, - "shortName": "SetLabels" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.AggregatedListInstantSnapshotsRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.instant_snapshots.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_instant_snapshots_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_InstantSnapshots_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_instant_snapshots_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", + "shortName": "InstantSnapshotsClient" + }, + 
"fullName": "google.cloud.compute_v1.InstantSnapshotsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.InstantSnapshots.Delete", + "service": { + "fullName": "google.cloud.compute.v1.InstantSnapshots", + "shortName": "InstantSnapshots" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteInstantSnapshotRequest" + }, + { + "name": "project", "type": "str" }, { - "name": "resource", + "name": "zone", "type": "str" }, { - "name": "zone_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.ZoneSetLabelsRequest" + "name": "instant_snapshot", + "type": "str" }, { "name": "retry", @@ -24666,13 +25006,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "shortName": "delete" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_instant_snapshots_set_labels_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_instant_snapshots_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_SetLabels_sync", + "regionTag": "compute_v1_generated_InstantSnapshots_Delete_sync", "segments": [ { "end": 53, @@ -24705,7 +25045,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_set_labels_sync.py" + "title": "compute_v1_generated_instant_snapshots_delete_sync.py" }, { "canonical": true, @@ -24714,19 +25054,19 @@ "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", "shortName": "InstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.InstantSnapshots.TestIamPermissions", + "fullName": "google.cloud.compute.v1.InstantSnapshots.GetIamPolicy", "service": { "fullName": 
"google.cloud.compute.v1.InstantSnapshots", "shortName": "InstantSnapshots" }, - "shortName": "TestIamPermissions" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyInstantSnapshotRequest" }, { "name": "project", @@ -24740,10 +25080,6 @@ "name": "resource", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -24757,14 +25093,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_instant_snapshots_test_iam_permissions_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_instant_snapshots_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InstantSnapshots_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_InstantSnapshots_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -24797,35 +25133,39 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_instant_snapshots_test_iam_permissions_sync.py" + "title": "compute_v1_generated_instant_snapshots_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", - "shortName": "InterconnectAttachmentGroupsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", + "shortName": "InstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.delete", + "fullName": 
"google.cloud.compute_v1.InstantSnapshotsClient.get", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.Delete", + "fullName": "google.cloud.compute.v1.InstantSnapshots.Get", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", - "shortName": "InterconnectAttachmentGroups" + "fullName": "google.cloud.compute.v1.InstantSnapshots", + "shortName": "InstantSnapshots" }, - "shortName": "Delete" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInterconnectAttachmentGroupRequest" + "type": "google.cloud.compute_v1.types.GetInstantSnapshotRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_attachment_group", + "name": "zone", + "type": "str" + }, + { + "name": "instant_snapshot", "type": "str" }, { @@ -24841,22 +25181,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.InstantSnapshot", + "shortName": "get" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_interconnect_attachment_groups_delete_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_instant_snapshots_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_Delete_sync", + "regionTag": "compute_v1_generated_InstantSnapshots_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -24866,52 +25206,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_interconnect_attachment_groups_delete_sync.py" + "title": "compute_v1_generated_instant_snapshots_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", - "shortName": "InterconnectAttachmentGroupsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", + "shortName": "InstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.GetIamPolicy", + "fullName": "google.cloud.compute.v1.InstantSnapshots.Insert", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", - "shortName": "InterconnectAttachmentGroups" + "fullName": "google.cloud.compute.v1.InstantSnapshots", + "shortName": "InstantSnapshots" }, - "shortName": "GetIamPolicy" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyInterconnectAttachmentGroupRequest" + "type": "google.cloud.compute_v1.types.InsertInstantSnapshotRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "zone", "type": "str" }, + { + "name": "instant_snapshot_resource", + "type": "google.cloud.compute_v1.types.InstantSnapshot" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -24925,14 +25269,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_interconnect_attachment_groups_get_iam_policy_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_instant_snapshots_insert_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_InstantSnapshots_Insert_sync", "segments": [ { "end": 52, @@ -24965,35 +25309,35 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachment_groups_get_iam_policy_sync.py" + "title": "compute_v1_generated_instant_snapshots_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", - "shortName": "InterconnectAttachmentGroupsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", + "shortName": "InstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.get_operational_status", + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.list", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.GetOperationalStatus", + "fullName": "google.cloud.compute.v1.InstantSnapshots.List", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", - "shortName": "InterconnectAttachmentGroups" + "fullName": "google.cloud.compute.v1.InstantSnapshots", + "shortName": "InstantSnapshots" }, - "shortName": "GetOperationalStatus" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetOperationalStatusInterconnectAttachmentGroupRequest" + "type": "google.cloud.compute_v1.types.ListInstantSnapshotsRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_attachment_group", + "name": "zone", "type": "str" }, { @@ -25009,22 +25353,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectAttachmentGroupsGetOperationalStatusResponse", - "shortName": "get_operational_status" + "resultType": "google.cloud.compute_v1.services.instant_snapshots.pagers.ListPager", + 
"shortName": "list" }, - "description": "Sample for GetOperationalStatus", - "file": "compute_v1_generated_interconnect_attachment_groups_get_operational_status_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_instant_snapshots_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_GetOperationalStatus_sync", + "regionTag": "compute_v1_generated_InstantSnapshots_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -25044,42 +25388,50 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachment_groups_get_operational_status_sync.py" + "title": "compute_v1_generated_instant_snapshots_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", - "shortName": "InterconnectAttachmentGroupsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", + "shortName": "InstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.get", + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.Get", + "fullName": "google.cloud.compute.v1.InstantSnapshots.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", - "shortName": "InterconnectAttachmentGroups" + "fullName": "google.cloud.compute.v1.InstantSnapshots", + "shortName": "InstantSnapshots" }, - "shortName": "Get" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInterconnectAttachmentGroupRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyInstantSnapshotRequest" }, { "name": 
"project", "type": "str" }, { - "name": "interconnect_attachment_group", + "name": "zone", + "type": "str" + }, + { + "name": "resource", "type": "str" }, + { + "name": "zone_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -25093,22 +25445,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectAttachmentGroup", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for Get", - "file": "compute_v1_generated_interconnect_attachment_groups_get_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_instant_snapshots_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_Get_sync", + "regionTag": "compute_v1_generated_InstantSnapshots_SetIamPolicy_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -25118,51 +25470,59 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachment_groups_get_sync.py" + "title": "compute_v1_generated_instant_snapshots_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", - "shortName": "InterconnectAttachmentGroupsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", + "shortName": "InstantSnapshotsClient" }, - "fullName": 
"google.cloud.compute_v1.InterconnectAttachmentGroupsClient.insert", + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.Insert", + "fullName": "google.cloud.compute.v1.InstantSnapshots.SetLabels", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", - "shortName": "InterconnectAttachmentGroups" + "fullName": "google.cloud.compute.v1.InstantSnapshots", + "shortName": "InstantSnapshots" }, - "shortName": "Insert" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertInterconnectAttachmentGroupRequest" + "type": "google.cloud.compute_v1.types.SetLabelsInstantSnapshotRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_attachment_group_resource", - "type": "google.cloud.compute_v1.types.InterconnectAttachmentGroup" + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "zone_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetLabelsRequest" }, { "name": "retry", @@ -25178,21 +25538,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "set_labels" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_interconnect_attachment_groups_insert_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_instant_snapshots_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_Insert_sync", + "regionTag": "compute_v1_generated_InstantSnapshots_SetLabels_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -25202,48 +25562,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachment_groups_insert_sync.py" + "title": "compute_v1_generated_instant_snapshots_set_labels_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", - "shortName": "InterconnectAttachmentGroupsClient" + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient", + "shortName": "InstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.list", + "fullName": "google.cloud.compute_v1.InstantSnapshotsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.List", + "fullName": "google.cloud.compute.v1.InstantSnapshots.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", - "shortName": "InterconnectAttachmentGroups" + "fullName": "google.cloud.compute.v1.InstantSnapshots", + "shortName": "InstantSnapshots" }, - "shortName": "List" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInterconnectAttachmentGroupsRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsInstantSnapshotRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -25257,22 +25629,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.interconnect_attachment_groups.pagers.ListPager", - "shortName": "list" + "resultType": 
"google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for List", - "file": "compute_v1_generated_interconnect_attachment_groups_list_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_instant_snapshots_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_List_sync", + "regionTag": "compute_v1_generated_InstantSnapshots_TestIamPermissions_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -25282,22 +25654,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachment_groups_list_sync.py" + "title": "compute_v1_generated_instant_snapshots_test_iam_permissions_sync.py" }, { "canonical": true, @@ -25306,19 +25678,19 @@ "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.patch", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.Patch", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.Delete", "service": { "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", "shortName": "InterconnectAttachmentGroups" }, - "shortName": "Patch" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchInterconnectAttachmentGroupRequest" + "type": 
"google.cloud.compute_v1.types.DeleteInterconnectAttachmentGroupRequest" }, { "name": "project", @@ -25328,10 +25700,6 @@ "name": "interconnect_attachment_group", "type": "str" }, - { - "name": "interconnect_attachment_group_resource", - "type": "google.cloud.compute_v1.types.InterconnectAttachmentGroup" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -25346,13 +25714,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "delete" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_interconnect_attachment_groups_patch_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_interconnect_attachment_groups_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_Patch_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_Delete_sync", "segments": [ { "end": 52, @@ -25385,7 +25753,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachment_groups_patch_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_delete_sync.py" }, { "canonical": true, @@ -25394,19 +25762,19 @@ "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.SetIamPolicy", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.GetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", "shortName": "InterconnectAttachmentGroups" }, - "shortName": "SetIamPolicy" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.SetIamPolicyInterconnectAttachmentGroupRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyInterconnectAttachmentGroupRequest" }, { "name": "project", @@ -25416,10 +25784,6 @@ "name": "resource", "type": "str" }, - { - "name": "global_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -25434,13 +25798,13 @@ } ], "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "shortName": "get_iam_policy" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_interconnect_attachment_groups_set_iam_policy_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_interconnect_attachment_groups_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_GetIamPolicy_sync", "segments": [ { "end": 52, @@ -25473,7 +25837,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachment_groups_set_iam_policy_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_get_iam_policy_sync.py" }, { "canonical": true, @@ -25482,32 +25846,28 @@ "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.get_operational_status", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.TestIamPermissions", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.GetOperationalStatus", "service": { "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", "shortName": 
"InterconnectAttachmentGroups" }, - "shortName": "TestIamPermissions" + "shortName": "GetOperationalStatus" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsInterconnectAttachmentGroupRequest" + "type": "google.cloud.compute_v1.types.GetOperationalStatusInterconnectAttachmentGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "interconnect_attachment_group", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -25521,14 +25881,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.InterconnectAttachmentGroupsGetOperationalStatusResponse", + "shortName": "get_operational_status" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_interconnect_attachment_groups_test_iam_permissions_sync.py", + "description": "Sample for GetOperationalStatus", + "file": "compute_v1_generated_interconnect_attachment_groups_get_operational_status_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_GetOperationalStatus_sync", "segments": [ { "end": 52, @@ -25561,33 +25921,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachment_groups_test_iam_permissions_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_get_operational_status_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", - "shortName": "InterconnectAttachmentsClient" + "fullName": 
"google.cloud.compute_v1.InterconnectAttachmentGroupsClient", + "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.aggregated_list", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments.AggregatedList", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.Get", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments", - "shortName": "InterconnectAttachments" + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", + "shortName": "InterconnectAttachmentGroups" }, - "shortName": "AggregatedList" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListInterconnectAttachmentsRequest" + "type": "google.cloud.compute_v1.types.GetInterconnectAttachmentGroupRequest" }, { "name": "project", "type": "str" }, + { + "name": "interconnect_attachment_group", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -25601,14 +25965,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.interconnect_attachments.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.types.InterconnectAttachmentGroup", + "shortName": "get" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_interconnect_attachments_aggregated_list_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_interconnect_attachment_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachments_AggregatedList_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_Get_sync", "segments": [ { "end": 52, @@ -25626,55 +25990,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachments_aggregated_list_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", - "shortName": "InterconnectAttachmentsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", + "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.delete", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments.Delete", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.Insert", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments", - "shortName": "InterconnectAttachments" + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", + "shortName": "InterconnectAttachmentGroups" }, - "shortName": "Delete" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInterconnectAttachmentRequest" + "type": "google.cloud.compute_v1.types.InsertInterconnectAttachmentGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", - "type": "str" - }, - { - "name": "interconnect_attachment", - "type": "str" + "name": "interconnect_attachment_group_resource", + "type": "google.cloud.compute_v1.types.InterconnectAttachmentGroup" }, { "name": "retry", @@ -25690,21 +26050,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "insert" }, - "description": "Sample for Delete", - "file": 
"compute_v1_generated_interconnect_attachments_delete_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_interconnect_attachment_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachments_Delete_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_Insert_sync", "segments": [ { - "end": 53, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 51, "start": 27, "type": "SHORT" }, @@ -25714,56 +26074,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachments_delete_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", - "shortName": "InterconnectAttachmentsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", + "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.get", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments.Get", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.List", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments", - "shortName": "InterconnectAttachments" + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", + "shortName": "InterconnectAttachmentGroups" }, - "shortName": "Get" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.GetInterconnectAttachmentRequest" + "type": "google.cloud.compute_v1.types.ListInterconnectAttachmentGroupsRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, - { - "name": "interconnect_attachment", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -25777,22 +26129,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectAttachment", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.interconnect_attachment_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_interconnect_attachments_get_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_interconnect_attachment_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachments_Get_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_List_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -25802,55 +26154,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachments_get_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", - "shortName": "InterconnectAttachmentsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", + "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": 
"google.cloud.compute_v1.InterconnectAttachmentsClient.insert", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.patch", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments.Insert", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.Patch", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments", - "shortName": "InterconnectAttachments" + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", + "shortName": "InterconnectAttachmentGroups" }, - "shortName": "Insert" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertInterconnectAttachmentRequest" + "type": "google.cloud.compute_v1.types.PatchInterconnectAttachmentGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "interconnect_attachment_group", "type": "str" }, { - "name": "interconnect_attachment_resource", - "type": "google.cloud.compute_v1.types.InterconnectAttachment" + "name": "interconnect_attachment_group_resource", + "type": "google.cloud.compute_v1.types.InterconnectAttachmentGroup" }, { "name": "retry", @@ -25866,13 +26218,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "patch" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_interconnect_attachments_insert_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_interconnect_attachment_groups_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachments_Insert_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_Patch_sync", "segments": [ { "end": 52, @@ -25905,37 +26257,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachments_insert_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_patch_sync.py" }, { 
"canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", - "shortName": "InterconnectAttachmentsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", + "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.list", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments.List", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments", - "shortName": "InterconnectAttachments" + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", + "shortName": "InterconnectAttachmentGroups" }, - "shortName": "List" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInterconnectAttachmentsRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyInterconnectAttachmentGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "resource", "type": "str" }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -25949,22 +26305,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.interconnect_attachments.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for List", - "file": "compute_v1_generated_interconnect_attachments_list_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_interconnect_attachment_groups_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_InterconnectAttachments_List_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_SetIamPolicy_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -25984,49 +26340,45 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachments_list_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", - "shortName": "InterconnectAttachmentsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient", + "shortName": "InterconnectAttachmentGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.patch", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentGroupsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments.Patch", + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments", - "shortName": "InterconnectAttachments" + "fullName": "google.cloud.compute.v1.InterconnectAttachmentGroups", + "shortName": "InterconnectAttachmentGroups" }, - "shortName": "Patch" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchInterconnectAttachmentRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsInterconnectAttachmentGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", - "type": "str" - }, - { - "name": "interconnect_attachment", + "name": "resource", "type": "str" }, { - "name": "interconnect_attachment_resource", - "type": 
"google.cloud.compute_v1.types.InterconnectAttachment" + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -26041,22 +26393,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_interconnect_attachments_patch_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_interconnect_attachment_groups_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachments_Patch_sync", + "regionTag": "compute_v1_generated_InterconnectAttachmentGroups_TestIamPermissions_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -26066,22 +26418,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachments_patch_sync.py" + "title": "compute_v1_generated_interconnect_attachment_groups_test_iam_permissions_sync.py" }, { "canonical": true, @@ -26090,36 +26442,24 @@ "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", "shortName": "InterconnectAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.set_labels", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.InterconnectAttachments.SetLabels", + 
"fullName": "google.cloud.compute.v1.InterconnectAttachments.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.InterconnectAttachments", "shortName": "InterconnectAttachments" }, - "shortName": "SetLabels" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsInterconnectAttachmentRequest" + "type": "google.cloud.compute_v1.types.AggregatedListInterconnectAttachmentsRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "region_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -26133,22 +26473,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "resultType": "google.cloud.compute_v1.services.interconnect_attachments.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_interconnect_attachments_set_labels_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_interconnect_attachments_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectAttachments_SetLabels_sync", + "regionTag": "compute_v1_generated_InterconnectAttachments_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -26158,55 +26498,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": 
"RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_attachments_set_labels_sync.py" + "title": "compute_v1_generated_interconnect_attachments_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", - "shortName": "InterconnectGroupsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.create_members", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.CreateMembers", + "fullName": "google.cloud.compute.v1.InterconnectAttachments.Delete", "service": { - "fullName": "google.cloud.compute.v1.InterconnectGroups", - "shortName": "InterconnectGroups" + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" }, - "shortName": "CreateMembers" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.CreateMembersInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.DeleteInterconnectAttachmentRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_group", + "name": "region", "type": "str" }, { - "name": "interconnect_groups_create_members_request_resource", - "type": "google.cloud.compute_v1.types.InterconnectGroupsCreateMembersRequest" + "name": "interconnect_attachment", + "type": "str" }, { "name": "retry", @@ -26222,21 +26562,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "create_members" + "shortName": "delete" }, - "description": "Sample for CreateMembers", - "file": "compute_v1_generated_interconnect_groups_create_members_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_interconnect_attachments_delete_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_CreateMembers_sync", + "regionTag": "compute_v1_generated_InterconnectAttachments_Delete_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -26246,50 +26586,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_create_members_sync.py" + "title": "compute_v1_generated_interconnect_attachments_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", - "shortName": "InterconnectGroupsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.delete", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.get", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.Delete", + "fullName": "google.cloud.compute.v1.InterconnectAttachments.Get", "service": { - "fullName": "google.cloud.compute.v1.InterconnectGroups", - "shortName": "InterconnectGroups" + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" }, - "shortName": "Delete" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.GetInterconnectAttachmentRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_group", + "name": "region", + "type": "str" + }, + { + "name": "interconnect_attachment", "type": "str" }, { 
@@ -26305,22 +26649,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.InterconnectAttachment", + "shortName": "get" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_interconnect_groups_delete_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_interconnect_attachments_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_Delete_sync", + "regionTag": "compute_v1_generated_InterconnectAttachments_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -26330,52 +26674,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_delete_sync.py" + "title": "compute_v1_generated_interconnect_attachments_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", - "shortName": "InterconnectGroupsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.GetIamPolicy", + "fullName": "google.cloud.compute.v1.InterconnectAttachments.Insert", "service": { - "fullName": "google.cloud.compute.v1.InterconnectGroups", - "shortName": "InterconnectGroups" + "fullName": 
"google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" }, - "shortName": "GetIamPolicy" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.InsertInterconnectAttachmentRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "region", "type": "str" }, + { + "name": "interconnect_attachment_resource", + "type": "google.cloud.compute_v1.types.InterconnectAttachment" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -26389,14 +26737,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_interconnect_groups_get_iam_policy_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_interconnect_attachments_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_InterconnectAttachments_Insert_sync", "segments": [ { "end": 52, @@ -26429,35 +26777,35 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_get_iam_policy_sync.py" + "title": "compute_v1_generated_interconnect_attachments_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", - "shortName": "InterconnectGroupsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.get_operational_status", + "fullName": 
"google.cloud.compute_v1.InterconnectAttachmentsClient.list", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.GetOperationalStatus", + "fullName": "google.cloud.compute.v1.InterconnectAttachments.List", "service": { - "fullName": "google.cloud.compute.v1.InterconnectGroups", - "shortName": "InterconnectGroups" + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" }, - "shortName": "GetOperationalStatus" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetOperationalStatusInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.ListInterconnectAttachmentsRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_group", + "name": "region", "type": "str" }, { @@ -26473,22 +26821,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectGroupsGetOperationalStatusResponse", - "shortName": "get_operational_status" + "resultType": "google.cloud.compute_v1.services.interconnect_attachments.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for GetOperationalStatus", - "file": "compute_v1_generated_interconnect_groups_get_operational_status_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_interconnect_attachments_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_GetOperationalStatus_sync", + "regionTag": "compute_v1_generated_InterconnectAttachments_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -26508,42 +26856,50 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_get_operational_status_sync.py" + "title": 
"compute_v1_generated_interconnect_attachments_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", - "shortName": "InterconnectGroupsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.get", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.patch", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.Get", + "fullName": "google.cloud.compute.v1.InterconnectAttachments.Patch", "service": { - "fullName": "google.cloud.compute.v1.InterconnectGroups", - "shortName": "InterconnectGroups" + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" }, - "shortName": "Get" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.PatchInterconnectAttachmentRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_group", + "name": "region", + "type": "str" + }, + { + "name": "interconnect_attachment", "type": "str" }, + { + "name": "interconnect_attachment_resource", + "type": "google.cloud.compute_v1.types.InterconnectAttachment" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -26557,22 +26913,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectGroup", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for Get", - "file": "compute_v1_generated_interconnect_groups_get_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_interconnect_attachments_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_InterconnectGroups_Get_sync", + "regionTag": "compute_v1_generated_InterconnectAttachments_Patch_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -26582,51 +26938,59 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_get_sync.py" + "title": "compute_v1_generated_interconnect_attachments_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", - "shortName": "InterconnectGroupsClient" + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient", + "shortName": "InterconnectAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.insert", + "fullName": "google.cloud.compute_v1.InterconnectAttachmentsClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.Insert", + "fullName": "google.cloud.compute.v1.InterconnectAttachments.SetLabels", "service": { - "fullName": "google.cloud.compute.v1.InterconnectGroups", - "shortName": "InterconnectGroups" + "fullName": "google.cloud.compute.v1.InterconnectAttachments", + "shortName": "InterconnectAttachments" }, - "shortName": "Insert" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.SetLabelsInterconnectAttachmentRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_group_resource", - "type": "google.cloud.compute_v1.types.InterconnectGroup" + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + 
}, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" }, { "name": "retry", @@ -26642,21 +27006,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "set_labels" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_interconnect_groups_insert_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_interconnect_attachments_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_Insert_sync", + "regionTag": "compute_v1_generated_InterconnectAttachments_SetLabels_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -26666,22 +27030,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_insert_sync.py" + "title": "compute_v1_generated_interconnect_attachments_set_labels_sync.py" }, { "canonical": true, @@ -26690,24 +27054,32 @@ "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", "shortName": "InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.list", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.create_members", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.List", + "fullName": "google.cloud.compute.v1.InterconnectGroups.CreateMembers", "service": { "fullName": "google.cloud.compute.v1.InterconnectGroups", "shortName": "InterconnectGroups" }, - "shortName": "List" + "shortName": "CreateMembers" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.ListInterconnectGroupsRequest" + "type": "google.cloud.compute_v1.types.CreateMembersInterconnectGroupRequest" }, { "name": "project", "type": "str" }, + { + "name": "interconnect_group", + "type": "str" + }, + { + "name": "interconnect_groups_create_members_request_resource", + "type": "google.cloud.compute_v1.types.InterconnectGroupsCreateMembersRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -26721,14 +27093,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.interconnect_groups.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "create_members" }, - "description": "Sample for List", - "file": "compute_v1_generated_interconnect_groups_list_sync.py", + "description": "Sample for CreateMembers", + "file": "compute_v1_generated_interconnect_groups_create_members_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_List_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_CreateMembers_sync", "segments": [ { "end": 52, @@ -26746,22 +27118,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_list_sync.py" + "title": "compute_v1_generated_interconnect_groups_create_members_sync.py" }, { "canonical": true, @@ -26770,19 +27142,19 @@ "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", "shortName": "InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.patch", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.delete", "method": { - "fullName": 
"google.cloud.compute.v1.InterconnectGroups.Patch", + "fullName": "google.cloud.compute.v1.InterconnectGroups.Delete", "service": { "fullName": "google.cloud.compute.v1.InterconnectGroups", "shortName": "InterconnectGroups" }, - "shortName": "Patch" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.DeleteInterconnectGroupRequest" }, { "name": "project", @@ -26792,10 +27164,6 @@ "name": "interconnect_group", "type": "str" }, - { - "name": "interconnect_group_resource", - "type": "google.cloud.compute_v1.types.InterconnectGroup" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -26810,13 +27178,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "delete" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_interconnect_groups_patch_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_interconnect_groups_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_Patch_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_Delete_sync", "segments": [ { "end": 52, @@ -26849,7 +27217,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_patch_sync.py" + "title": "compute_v1_generated_interconnect_groups_delete_sync.py" }, { "canonical": true, @@ -26858,19 +27226,19 @@ "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", "shortName": "InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.SetIamPolicy", + "fullName": "google.cloud.compute.v1.InterconnectGroups.GetIamPolicy", "service": { "fullName": 
"google.cloud.compute.v1.InterconnectGroups", "shortName": "InterconnectGroups" }, - "shortName": "SetIamPolicy" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyInterconnectGroupRequest" }, { "name": "project", @@ -26880,10 +27248,6 @@ "name": "resource", "type": "str" }, - { - "name": "global_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -26898,13 +27262,13 @@ } ], "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "shortName": "get_iam_policy" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_interconnect_groups_set_iam_policy_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_interconnect_groups_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_GetIamPolicy_sync", "segments": [ { "end": 52, @@ -26937,7 +27301,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_set_iam_policy_sync.py" + "title": "compute_v1_generated_interconnect_groups_get_iam_policy_sync.py" }, { "canonical": true, @@ -26946,32 +27310,28 @@ "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", "shortName": "InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.get_operational_status", "method": { - "fullName": "google.cloud.compute.v1.InterconnectGroups.TestIamPermissions", + "fullName": "google.cloud.compute.v1.InterconnectGroups.GetOperationalStatus", "service": { "fullName": 
"google.cloud.compute.v1.InterconnectGroups", "shortName": "InterconnectGroups" }, - "shortName": "TestIamPermissions" + "shortName": "GetOperationalStatus" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsInterconnectGroupRequest" + "type": "google.cloud.compute_v1.types.GetOperationalStatusInterconnectGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "interconnect_group", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -26985,14 +27345,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.InterconnectGroupsGetOperationalStatusResponse", + "shortName": "get_operational_status" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_interconnect_groups_test_iam_permissions_sync.py", + "description": "Sample for GetOperationalStatus", + "file": "compute_v1_generated_interconnect_groups_get_operational_status_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectGroups_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_GetOperationalStatus_sync", "segments": [ { "end": 52, @@ -27025,35 +27385,35 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_groups_test_iam_permissions_sync.py" + "title": "compute_v1_generated_interconnect_groups_get_operational_status_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectLocationsClient", - "shortName": "InterconnectLocationsClient" + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", + "shortName": 
"InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectLocationsClient.get", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.InterconnectLocations.Get", + "fullName": "google.cloud.compute.v1.InterconnectGroups.Get", "service": { - "fullName": "google.cloud.compute.v1.InterconnectLocations", - "shortName": "InterconnectLocations" + "fullName": "google.cloud.compute.v1.InterconnectGroups", + "shortName": "InterconnectGroups" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInterconnectLocationRequest" + "type": "google.cloud.compute_v1.types.GetInterconnectGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_location", + "name": "interconnect_group", "type": "str" }, { @@ -27069,14 +27429,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectLocation", + "resultType": "google.cloud.compute_v1.types.InterconnectGroup", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_interconnect_locations_get_sync.py", + "file": "compute_v1_generated_interconnect_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectLocations_Get_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_Get_sync", "segments": [ { "end": 52, @@ -27109,33 +27469,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_locations_get_sync.py" + "title": "compute_v1_generated_interconnect_groups_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectLocationsClient", - "shortName": "InterconnectLocationsClient" + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", + "shortName": "InterconnectGroupsClient" }, - "fullName": 
"google.cloud.compute_v1.InterconnectLocationsClient.list", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.InterconnectLocations.List", + "fullName": "google.cloud.compute.v1.InterconnectGroups.Insert", "service": { - "fullName": "google.cloud.compute.v1.InterconnectLocations", - "shortName": "InterconnectLocations" + "fullName": "google.cloud.compute.v1.InterconnectGroups", + "shortName": "InterconnectGroups" }, - "shortName": "List" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInterconnectLocationsRequest" + "type": "google.cloud.compute_v1.types.InsertInterconnectGroupRequest" }, { "name": "project", "type": "str" }, + { + "name": "interconnect_group_resource", + "type": "google.cloud.compute_v1.types.InterconnectGroup" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -27149,22 +27513,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.interconnect_locations.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for List", - "file": "compute_v1_generated_interconnect_locations_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_interconnect_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectLocations_List_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_Insert_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -27184,42 +27548,38 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_locations_list_sync.py" + "title": 
"compute_v1_generated_interconnect_groups_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient", - "shortName": "InterconnectRemoteLocationsClient" + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", + "shortName": "InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient.get", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations.Get", + "fullName": "google.cloud.compute.v1.InterconnectGroups.List", "service": { - "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations", - "shortName": "InterconnectRemoteLocations" + "fullName": "google.cloud.compute.v1.InterconnectGroups", + "shortName": "InterconnectGroups" }, - "shortName": "Get" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInterconnectRemoteLocationRequest" + "type": "google.cloud.compute_v1.types.ListInterconnectGroupsRequest" }, { "name": "project", "type": "str" }, - { - "name": "interconnect_remote_location", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -27233,14 +27593,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectRemoteLocation", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.interconnect_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_interconnect_remote_locations_get_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_interconnect_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectRemoteLocations_Get_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_List_sync", "segments": [ { "end": 
52, @@ -27258,48 +27618,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_remote_locations_get_sync.py" + "title": "compute_v1_generated_interconnect_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient", - "shortName": "InterconnectRemoteLocationsClient" + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", + "shortName": "InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient.list", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.patch", "method": { - "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations.List", + "fullName": "google.cloud.compute.v1.InterconnectGroups.Patch", "service": { - "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations", - "shortName": "InterconnectRemoteLocations" + "fullName": "google.cloud.compute.v1.InterconnectGroups", + "shortName": "InterconnectGroups" }, - "shortName": "List" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInterconnectRemoteLocationsRequest" + "type": "google.cloud.compute_v1.types.PatchInterconnectGroupRequest" }, { "name": "project", "type": "str" }, + { + "name": "interconnect_group", + "type": "str" + }, + { + "name": "interconnect_group_resource", + "type": "google.cloud.compute_v1.types.InterconnectGroup" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -27313,14 +27681,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.interconnect_remote_locations.pagers.ListPager", - "shortName": "list" + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for List", - "file": "compute_v1_generated_interconnect_remote_locations_list_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_interconnect_groups_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_InterconnectRemoteLocations_List_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_Patch_sync", "segments": [ { "end": 52, @@ -27338,52 +27706,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnect_remote_locations_list_sync.py" + "title": "compute_v1_generated_interconnect_groups_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectsClient", - "shortName": "InterconnectsClient" + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", + "shortName": "InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.delete", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.Delete", + "fullName": "google.cloud.compute.v1.InterconnectGroups.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.Interconnects", - "shortName": "Interconnects" + "fullName": "google.cloud.compute.v1.InterconnectGroups", + "shortName": "InterconnectGroups" }, - "shortName": "Delete" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInterconnectRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyInterconnectGroupRequest" }, { "name": "project", 
"type": "str" }, { - "name": "interconnect", + "name": "resource", "type": "str" }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -27397,14 +27769,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_interconnects_delete_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_interconnect_groups_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_Delete_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_SetIamPolicy_sync", "segments": [ { "end": 52, @@ -27437,37 +27809,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnects_delete_sync.py" + "title": "compute_v1_generated_interconnect_groups_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectsClient", - "shortName": "InterconnectsClient" + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient", + "shortName": "InterconnectGroupsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.get_diagnostics", + "fullName": "google.cloud.compute_v1.InterconnectGroupsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.GetDiagnostics", + "fullName": "google.cloud.compute.v1.InterconnectGroups.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.Interconnects", - "shortName": "Interconnects" + "fullName": "google.cloud.compute.v1.InterconnectGroups", + "shortName": "InterconnectGroups" }, - "shortName": "GetDiagnostics" + 
"shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetDiagnosticsInterconnectRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsInterconnectGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -27481,14 +27857,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectsGetDiagnosticsResponse", - "shortName": "get_diagnostics" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for GetDiagnostics", - "file": "compute_v1_generated_interconnects_get_diagnostics_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_interconnect_groups_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_GetDiagnostics_sync", + "regionTag": "compute_v1_generated_InterconnectGroups_TestIamPermissions_sync", "segments": [ { "end": 52, @@ -27521,35 +27897,35 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnects_get_diagnostics_sync.py" + "title": "compute_v1_generated_interconnect_groups_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectsClient", - "shortName": "InterconnectsClient" + "fullName": "google.cloud.compute_v1.InterconnectLocationsClient", + "shortName": "InterconnectLocationsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.get_macsec_config", + "fullName": "google.cloud.compute_v1.InterconnectLocationsClient.get", "method": { - "fullName": 
"google.cloud.compute.v1.Interconnects.GetMacsecConfig", + "fullName": "google.cloud.compute.v1.InterconnectLocations.Get", "service": { - "fullName": "google.cloud.compute.v1.Interconnects", - "shortName": "Interconnects" + "fullName": "google.cloud.compute.v1.InterconnectLocations", + "shortName": "InterconnectLocations" }, - "shortName": "GetMacsecConfig" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetMacsecConfigInterconnectRequest" + "type": "google.cloud.compute_v1.types.GetInterconnectLocationRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect", + "name": "interconnect_location", "type": "str" }, { @@ -27565,14 +27941,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InterconnectsGetMacsecConfigResponse", - "shortName": "get_macsec_config" + "resultType": "google.cloud.compute_v1.types.InterconnectLocation", + "shortName": "get" }, - "description": "Sample for GetMacsecConfig", - "file": "compute_v1_generated_interconnects_get_macsec_config_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_interconnect_locations_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_GetMacsecConfig_sync", + "regionTag": "compute_v1_generated_InterconnectLocations_Get_sync", "segments": [ { "end": 52, @@ -27605,37 +27981,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnects_get_macsec_config_sync.py" + "title": "compute_v1_generated_interconnect_locations_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectsClient", - "shortName": "InterconnectsClient" + "fullName": "google.cloud.compute_v1.InterconnectLocationsClient", + "shortName": "InterconnectLocationsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.get", + "fullName": 
"google.cloud.compute_v1.InterconnectLocationsClient.list", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.Get", + "fullName": "google.cloud.compute.v1.InterconnectLocations.List", "service": { - "fullName": "google.cloud.compute.v1.Interconnects", - "shortName": "Interconnects" + "fullName": "google.cloud.compute.v1.InterconnectLocations", + "shortName": "InterconnectLocations" }, - "shortName": "Get" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetInterconnectRequest" + "type": "google.cloud.compute_v1.types.ListInterconnectLocationsRequest" }, { "name": "project", "type": "str" }, - { - "name": "interconnect", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -27649,14 +28021,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Interconnect", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.interconnect_locations.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_interconnects_get_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_interconnect_locations_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_Get_sync", + "regionTag": "compute_v1_generated_InterconnectLocations_List_sync", "segments": [ { "end": 52, @@ -27674,51 +28046,51 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnects_get_sync.py" + "title": "compute_v1_generated_interconnect_locations_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.compute_v1.InterconnectsClient", - "shortName": "InterconnectsClient" + "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient", + "shortName": "InterconnectRemoteLocationsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.insert", + "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient.get", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.Insert", + "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations.Get", "service": { - "fullName": "google.cloud.compute.v1.Interconnects", - "shortName": "Interconnects" + "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations", + "shortName": "InterconnectRemoteLocations" }, - "shortName": "Insert" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertInterconnectRequest" + "type": "google.cloud.compute_v1.types.GetInterconnectRemoteLocationRequest" }, { "name": "project", "type": "str" }, { - "name": "interconnect_resource", - "type": "google.cloud.compute_v1.types.Interconnect" + "name": "interconnect_remote_location", + "type": "str" }, { "name": "retry", @@ -27733,22 +28105,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.InterconnectRemoteLocation", + "shortName": "get" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_interconnects_insert_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_interconnect_remote_locations_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_Insert_sync", + "regionTag": "compute_v1_generated_InterconnectRemoteLocations_Get_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -27758,43 
+28130,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnects_insert_sync.py" + "title": "compute_v1_generated_interconnect_remote_locations_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.InterconnectsClient", - "shortName": "InterconnectsClient" + "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient", + "shortName": "InterconnectRemoteLocationsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.list", + "fullName": "google.cloud.compute_v1.InterconnectRemoteLocationsClient.list", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.List", + "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations.List", "service": { - "fullName": "google.cloud.compute.v1.Interconnects", - "shortName": "Interconnects" + "fullName": "google.cloud.compute.v1.InterconnectRemoteLocations", + "shortName": "InterconnectRemoteLocations" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInterconnectsRequest" + "type": "google.cloud.compute_v1.types.ListInterconnectRemoteLocationsRequest" }, { "name": "project", @@ -27813,14 +28185,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.interconnects.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.interconnect_remote_locations.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": "compute_v1_generated_interconnects_list_sync.py", + "file": "compute_v1_generated_interconnect_remote_locations_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_Interconnects_List_sync", + "regionTag": "compute_v1_generated_InterconnectRemoteLocations_List_sync", "segments": [ { "end": 52, @@ -27853,7 +28225,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_interconnects_list_sync.py" + "title": "compute_v1_generated_interconnect_remote_locations_list_sync.py" }, { "canonical": true, @@ -27862,19 +28234,19 @@ "fullName": "google.cloud.compute_v1.InterconnectsClient", "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.patch", + "fullName": "google.cloud.compute_v1.InterconnectsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.Patch", + "fullName": "google.cloud.compute.v1.Interconnects.Delete", "service": { "fullName": "google.cloud.compute.v1.Interconnects", "shortName": "Interconnects" }, - "shortName": "Patch" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchInterconnectRequest" + "type": "google.cloud.compute_v1.types.DeleteInterconnectRequest" }, { "name": "project", @@ -27884,10 +28256,6 @@ "name": "interconnect", "type": "str" }, - { - "name": "interconnect_resource", - "type": "google.cloud.compute_v1.types.Interconnect" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -27902,13 +28270,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "delete" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_interconnects_patch_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_interconnects_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_Patch_sync", + "regionTag": "compute_v1_generated_Interconnects_Delete_sync", "segments": [ { "end": 52, @@ -27941,7 +28309,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_interconnects_patch_sync.py" + "title": "compute_v1_generated_interconnects_delete_sync.py" }, { "canonical": true, @@ -27950,32 +28318,28 @@ "fullName": "google.cloud.compute_v1.InterconnectsClient", "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.InterconnectsClient.set_labels", + "fullName": "google.cloud.compute_v1.InterconnectsClient.get_diagnostics", "method": { - "fullName": "google.cloud.compute.v1.Interconnects.SetLabels", + "fullName": "google.cloud.compute.v1.Interconnects.GetDiagnostics", "service": { "fullName": "google.cloud.compute.v1.Interconnects", "shortName": "Interconnects" }, - "shortName": "SetLabels" + "shortName": "GetDiagnostics" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsInterconnectRequest" + "type": "google.cloud.compute_v1.types.GetDiagnosticsInterconnectRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "interconnect", "type": "str" }, - { - "name": "global_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -27989,14 +28353,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "resultType": "google.cloud.compute_v1.types.InterconnectsGetDiagnosticsResponse", + "shortName": "get_diagnostics" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_interconnects_set_labels_sync.py", + "description": "Sample for GetDiagnostics", + "file": "compute_v1_generated_interconnects_get_diagnostics_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Interconnects_SetLabels_sync", + "regionTag": "compute_v1_generated_Interconnects_GetDiagnostics_sync", "segments": [ { "end": 52, @@ -28029,35 +28393,35 @@ "type": "RESPONSE_HANDLING" } ], - 
"title": "compute_v1_generated_interconnects_set_labels_sync.py" + "title": "compute_v1_generated_interconnects_get_diagnostics_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.LicenseCodesClient", - "shortName": "LicenseCodesClient" + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.LicenseCodesClient.get", + "fullName": "google.cloud.compute_v1.InterconnectsClient.get_macsec_config", "method": { - "fullName": "google.cloud.compute.v1.LicenseCodes.Get", + "fullName": "google.cloud.compute.v1.Interconnects.GetMacsecConfig", "service": { - "fullName": "google.cloud.compute.v1.LicenseCodes", - "shortName": "LicenseCodes" + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" }, - "shortName": "Get" + "shortName": "GetMacsecConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetLicenseCodeRequest" + "type": "google.cloud.compute_v1.types.GetMacsecConfigInterconnectRequest" }, { "name": "project", "type": "str" }, { - "name": "license_code", + "name": "interconnect", "type": "str" }, { @@ -28073,14 +28437,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.LicenseCode", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.InterconnectsGetMacsecConfigResponse", + "shortName": "get_macsec_config" }, - "description": "Sample for Get", - "file": "compute_v1_generated_license_codes_get_sync.py", + "description": "Sample for GetMacsecConfig", + "file": "compute_v1_generated_interconnects_get_macsec_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_LicenseCodes_Get_sync", + "regionTag": "compute_v1_generated_Interconnects_GetMacsecConfig_sync", "segments": [ { "end": 52, @@ -28113,41 +28477,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_license_codes_get_sync.py" + "title": "compute_v1_generated_interconnects_get_macsec_config_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.LicenseCodesClient", - "shortName": "LicenseCodesClient" + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.LicenseCodesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.InterconnectsClient.get", "method": { - "fullName": "google.cloud.compute.v1.LicenseCodes.TestIamPermissions", + "fullName": "google.cloud.compute.v1.Interconnects.Get", "service": { - "fullName": "google.cloud.compute.v1.LicenseCodes", - "shortName": "LicenseCodes" + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" }, - "shortName": "TestIamPermissions" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsLicenseCodeRequest" + "type": "google.cloud.compute_v1.types.GetInterconnectRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "interconnect", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -28161,14 +28521,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.Interconnect", + "shortName": "get" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_license_codes_test_iam_permissions_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_interconnects_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_LicenseCodes_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_Interconnects_Get_sync", "segments": [ { "end": 52, @@ -28201,36 +28561,36 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_license_codes_test_iam_permissions_sync.py" + "title": "compute_v1_generated_interconnects_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.LicensesClient", - "shortName": "LicensesClient" + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.LicensesClient.delete", + "fullName": "google.cloud.compute_v1.InterconnectsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.Licenses.Delete", + "fullName": "google.cloud.compute.v1.Interconnects.Insert", "service": { - "fullName": "google.cloud.compute.v1.Licenses", - "shortName": "Licenses" + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" }, - "shortName": "Delete" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteLicenseRequest" + "type": "google.cloud.compute_v1.types.InsertInterconnectRequest" }, { "name": "project", "type": "str" }, { - "name": "license_", - "type": "str" + "name": "interconnect_resource", + "type": "google.cloud.compute_v1.types.Interconnect" }, { "name": "retry", @@ -28246,13 +28606,93 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "insert" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_licenses_delete_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_interconnects_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Licenses_Delete_sync", + "regionTag": "compute_v1_generated_Interconnects_Insert_sync", + "segments": [ + { + "end": 51, + 
"start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_interconnects_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" + }, + "fullName": "google.cloud.compute_v1.InterconnectsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.Interconnects.List", + "service": { + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListInterconnectsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.interconnects.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_interconnects_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Interconnects_List_sync", "segments": [ { "end": 52, @@ -28270,52 +28710,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_licenses_delete_sync.py" + "title": 
"compute_v1_generated_interconnects_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.LicensesClient", - "shortName": "LicensesClient" + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.LicensesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.InterconnectsClient.patch", "method": { - "fullName": "google.cloud.compute.v1.Licenses.GetIamPolicy", + "fullName": "google.cloud.compute.v1.Interconnects.Patch", "service": { - "fullName": "google.cloud.compute.v1.Licenses", - "shortName": "Licenses" + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" }, - "shortName": "GetIamPolicy" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyLicenseRequest" + "type": "google.cloud.compute_v1.types.PatchInterconnectRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "interconnect", "type": "str" }, + { + "name": "interconnect_resource", + "type": "google.cloud.compute_v1.types.Interconnect" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -28329,14 +28773,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_licenses_get_iam_policy_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_interconnects_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Licenses_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_Interconnects_Patch_sync", "segments": [ { "end": 52, @@ -28369,37 +28813,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_licenses_get_iam_policy_sync.py" + "title": "compute_v1_generated_interconnects_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.LicensesClient", - "shortName": "LicensesClient" + "fullName": "google.cloud.compute_v1.InterconnectsClient", + "shortName": "InterconnectsClient" }, - "fullName": "google.cloud.compute_v1.LicensesClient.get", + "fullName": "google.cloud.compute_v1.InterconnectsClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.Licenses.Get", + "fullName": "google.cloud.compute.v1.Interconnects.SetLabels", "service": { - "fullName": "google.cloud.compute.v1.Licenses", - "shortName": "Licenses" + "fullName": "google.cloud.compute.v1.Interconnects", + "shortName": "Interconnects" }, - "shortName": "Get" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetLicenseRequest" + "type": "google.cloud.compute_v1.types.SetLabelsInterconnectRequest" }, { "name": "project", "type": "str" }, { - "name": "license_", + "name": "resource", "type": "str" }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -28413,14 +28861,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.License", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" }, - "description": "Sample for Get", - "file": "compute_v1_generated_licenses_get_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_interconnects_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Licenses_Get_sync", + "regionTag": "compute_v1_generated_Interconnects_SetLabels_sync", "segments": [ { "end": 52, @@ -28453,36 +28901,36 @@ "type": 
"RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_licenses_get_sync.py" + "title": "compute_v1_generated_interconnects_set_labels_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.LicensesClient", - "shortName": "LicensesClient" + "fullName": "google.cloud.compute_v1.LicenseCodesClient", + "shortName": "LicenseCodesClient" }, - "fullName": "google.cloud.compute_v1.LicensesClient.insert", + "fullName": "google.cloud.compute_v1.LicenseCodesClient.get", "method": { - "fullName": "google.cloud.compute.v1.Licenses.Insert", + "fullName": "google.cloud.compute.v1.LicenseCodes.Get", "service": { - "fullName": "google.cloud.compute.v1.Licenses", - "shortName": "Licenses" + "fullName": "google.cloud.compute.v1.LicenseCodes", + "shortName": "LicenseCodes" }, - "shortName": "Insert" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertLicenseRequest" + "type": "google.cloud.compute_v1.types.GetLicenseCodeRequest" }, { "name": "project", "type": "str" }, { - "name": "license_resource", - "type": "google.cloud.compute_v1.types.License" + "name": "license_code", + "type": "str" }, { "name": "retry", @@ -28497,22 +28945,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.LicenseCode", + "shortName": "get" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_licenses_insert_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_license_codes_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Licenses_Insert_sync", + "regionTag": "compute_v1_generated_LicenseCodes_Get_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -28522,48 +28970,56 @@ 
"type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_licenses_insert_sync.py" + "title": "compute_v1_generated_license_codes_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.LicensesClient", - "shortName": "LicensesClient" + "fullName": "google.cloud.compute_v1.LicenseCodesClient", + "shortName": "LicenseCodesClient" }, - "fullName": "google.cloud.compute_v1.LicensesClient.list", + "fullName": "google.cloud.compute_v1.LicenseCodesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.Licenses.List", + "fullName": "google.cloud.compute.v1.LicenseCodes.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.Licenses", - "shortName": "Licenses" + "fullName": "google.cloud.compute.v1.LicenseCodes", + "shortName": "LicenseCodes" }, - "shortName": "List" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListLicensesRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsLicenseCodeRequest" }, { "name": "project", "type": "str" }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -28577,14 +29033,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.licenses.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for List", - "file": 
"compute_v1_generated_licenses_list_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_license_codes_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Licenses_List_sync", + "regionTag": "compute_v1_generated_LicenseCodes_TestIamPermissions_sync", "segments": [ { "end": 52, @@ -28602,22 +29058,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_licenses_list_sync.py" + "title": "compute_v1_generated_license_codes_test_iam_permissions_sync.py" }, { "canonical": true, @@ -28626,32 +29082,28 @@ "fullName": "google.cloud.compute_v1.LicensesClient", "shortName": "LicensesClient" }, - "fullName": "google.cloud.compute_v1.LicensesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.LicensesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.Licenses.SetIamPolicy", + "fullName": "google.cloud.compute.v1.Licenses.Delete", "service": { "fullName": "google.cloud.compute.v1.Licenses", "shortName": "Licenses" }, - "shortName": "SetIamPolicy" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyLicenseRequest" + "type": "google.cloud.compute_v1.types.DeleteLicenseRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "license_", "type": "str" }, - { - "name": "global_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -28665,14 +29117,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + 
"resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_licenses_set_iam_policy_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_licenses_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Licenses_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_Licenses_Delete_sync", "segments": [ { "end": 52, @@ -28705,7 +29157,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_licenses_set_iam_policy_sync.py" + "title": "compute_v1_generated_licenses_delete_sync.py" }, { "canonical": true, @@ -28714,19 +29166,19 @@ "fullName": "google.cloud.compute_v1.LicensesClient", "shortName": "LicensesClient" }, - "fullName": "google.cloud.compute_v1.LicensesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.LicensesClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.Licenses.TestIamPermissions", + "fullName": "google.cloud.compute.v1.Licenses.GetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.Licenses", "shortName": "Licenses" }, - "shortName": "TestIamPermissions" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsLicenseRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyLicenseRequest" }, { "name": "project", @@ -28736,10 +29188,6 @@ "name": "resource", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -28753,14 +29201,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" 
}, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_licenses_test_iam_permissions_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_licenses_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Licenses_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_Licenses_GetIamPolicy_sync", "segments": [ { "end": 52, @@ -28793,7 +29241,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_licenses_test_iam_permissions_sync.py" + "title": "compute_v1_generated_licenses_get_iam_policy_sync.py" }, { "canonical": true, @@ -28802,19 +29250,19 @@ "fullName": "google.cloud.compute_v1.LicensesClient", "shortName": "LicensesClient" }, - "fullName": "google.cloud.compute_v1.LicensesClient.update", + "fullName": "google.cloud.compute_v1.LicensesClient.get", "method": { - "fullName": "google.cloud.compute.v1.Licenses.Update", + "fullName": "google.cloud.compute.v1.Licenses.Get", "service": { "fullName": "google.cloud.compute.v1.Licenses", "shortName": "Licenses" }, - "shortName": "Update" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateLicenseRequest" + "type": "google.cloud.compute_v1.types.GetLicenseRequest" }, { "name": "project", @@ -28824,10 +29272,6 @@ "name": "license_", "type": "str" }, - { - "name": "license_resource", - "type": "google.cloud.compute_v1.types.License" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -28841,14 +29285,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update" + "resultType": "google.cloud.compute_v1.types.License", + "shortName": "get" }, - "description": "Sample for Update", - "file": "compute_v1_generated_licenses_update_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_licenses_get_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Licenses_Update_sync", + "regionTag": "compute_v1_generated_Licenses_Get_sync", "segments": [ { "end": 52, @@ -28881,36 +29325,36 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_licenses_update_sync.py" + "title": "compute_v1_generated_licenses_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.MachineImagesClient", - "shortName": "MachineImagesClient" + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" }, - "fullName": "google.cloud.compute_v1.MachineImagesClient.delete", + "fullName": "google.cloud.compute_v1.LicensesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.MachineImages.Delete", + "fullName": "google.cloud.compute.v1.Licenses.Insert", "service": { - "fullName": "google.cloud.compute.v1.MachineImages", - "shortName": "MachineImages" + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" }, - "shortName": "Delete" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteMachineImageRequest" + "type": "google.cloud.compute_v1.types.InsertLicenseRequest" }, { "name": "project", "type": "str" }, { - "name": "machine_image", - "type": "str" + "name": "license_resource", + "type": "google.cloud.compute_v1.types.License" }, { "name": "retry", @@ -28926,21 +29370,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "insert" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_machine_images_delete_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_licenses_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineImages_Delete_sync", + "regionTag": "compute_v1_generated_Licenses_Insert_sync", "segments": [ { - 
"end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -28950,52 +29394,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_images_delete_sync.py" + "title": "compute_v1_generated_licenses_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.MachineImagesClient", - "shortName": "MachineImagesClient" + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" }, - "fullName": "google.cloud.compute_v1.MachineImagesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.LicensesClient.list", "method": { - "fullName": "google.cloud.compute.v1.MachineImages.GetIamPolicy", + "fullName": "google.cloud.compute.v1.Licenses.List", "service": { - "fullName": "google.cloud.compute.v1.MachineImages", - "shortName": "MachineImages" + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" }, - "shortName": "GetIamPolicy" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyMachineImageRequest" + "type": "google.cloud.compute_v1.types.ListLicensesRequest" }, { "name": "project", "type": "str" }, - { - "name": "resource", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29009,14 +29449,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.cloud.compute_v1.services.licenses.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for GetIamPolicy", - "file": 
"compute_v1_generated_machine_images_get_iam_policy_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_licenses_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineImages_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_Licenses_List_sync", "segments": [ { "end": 52, @@ -29034,52 +29474,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_images_get_iam_policy_sync.py" + "title": "compute_v1_generated_licenses_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.MachineImagesClient", - "shortName": "MachineImagesClient" + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" }, - "fullName": "google.cloud.compute_v1.MachineImagesClient.get", + "fullName": "google.cloud.compute_v1.LicensesClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.MachineImages.Get", + "fullName": "google.cloud.compute.v1.Licenses.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.MachineImages", - "shortName": "MachineImages" + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" }, - "shortName": "Get" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetMachineImageRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyLicenseRequest" }, { "name": "project", "type": "str" }, { - "name": "machine_image", + "name": "resource", "type": "str" }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ 
-29093,14 +29537,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.MachineImage", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for Get", - "file": "compute_v1_generated_machine_images_get_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_licenses_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineImages_Get_sync", + "regionTag": "compute_v1_generated_Licenses_SetIamPolicy_sync", "segments": [ { "end": 52, @@ -29133,36 +29577,40 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_images_get_sync.py" + "title": "compute_v1_generated_licenses_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.MachineImagesClient", - "shortName": "MachineImagesClient" + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" }, - "fullName": "google.cloud.compute_v1.MachineImagesClient.insert", + "fullName": "google.cloud.compute_v1.LicensesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.MachineImages.Insert", + "fullName": "google.cloud.compute.v1.Licenses.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.MachineImages", - "shortName": "MachineImages" + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" }, - "shortName": "Insert" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertMachineImageRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsLicenseRequest" }, { "name": "project", "type": "str" }, { - "name": "machine_image_resource", - "type": "google.cloud.compute_v1.types.MachineImage" + "name": "resource", + "type": "str" + }, + { + "name": 
"test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -29177,22 +29625,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_machine_images_insert_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_licenses_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineImages_Insert_sync", + "regionTag": "compute_v1_generated_Licenses_TestIamPermissions_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -29202,48 +29650,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_images_insert_sync.py" + "title": "compute_v1_generated_licenses_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.MachineImagesClient", - "shortName": "MachineImagesClient" + "fullName": "google.cloud.compute_v1.LicensesClient", + "shortName": "LicensesClient" }, - "fullName": "google.cloud.compute_v1.MachineImagesClient.list", + "fullName": "google.cloud.compute_v1.LicensesClient.update", "method": { - "fullName": "google.cloud.compute.v1.MachineImages.List", + "fullName": "google.cloud.compute.v1.Licenses.Update", "service": { - "fullName": "google.cloud.compute.v1.MachineImages", - "shortName": 
"MachineImages" + "fullName": "google.cloud.compute.v1.Licenses", + "shortName": "Licenses" }, - "shortName": "List" + "shortName": "Update" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListMachineImagesRequest" + "type": "google.cloud.compute_v1.types.UpdateLicenseRequest" }, { "name": "project", "type": "str" }, + { + "name": "license_", + "type": "str" + }, + { + "name": "license_resource", + "type": "google.cloud.compute_v1.types.License" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29257,14 +29713,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.machine_images.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" }, - "description": "Sample for List", - "file": "compute_v1_generated_machine_images_list_sync.py", + "description": "Sample for Update", + "file": "compute_v1_generated_licenses_update_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineImages_List_sync", + "regionTag": "compute_v1_generated_Licenses_Update_sync", "segments": [ { "end": 52, @@ -29282,22 +29738,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_images_list_sync.py" + "title": "compute_v1_generated_licenses_update_sync.py" }, { "canonical": true, @@ -29306,32 +29762,28 @@ "fullName": "google.cloud.compute_v1.MachineImagesClient", "shortName": "MachineImagesClient" }, - "fullName": "google.cloud.compute_v1.MachineImagesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.MachineImagesClient.delete", "method": { - "fullName": 
"google.cloud.compute.v1.MachineImages.SetIamPolicy", + "fullName": "google.cloud.compute.v1.MachineImages.Delete", "service": { "fullName": "google.cloud.compute.v1.MachineImages", "shortName": "MachineImages" }, - "shortName": "SetIamPolicy" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyMachineImageRequest" + "type": "google.cloud.compute_v1.types.DeleteMachineImageRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "machine_image", "type": "str" }, - { - "name": "global_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29345,14 +29797,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_machine_images_set_iam_policy_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_machine_images_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineImages_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_MachineImages_Delete_sync", "segments": [ { "end": 52, @@ -29385,7 +29837,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_images_set_iam_policy_sync.py" + "title": "compute_v1_generated_machine_images_delete_sync.py" }, { "canonical": true, @@ -29394,19 +29846,19 @@ "fullName": "google.cloud.compute_v1.MachineImagesClient", "shortName": "MachineImagesClient" }, - "fullName": "google.cloud.compute_v1.MachineImagesClient.set_labels", + "fullName": "google.cloud.compute_v1.MachineImagesClient.get_iam_policy", "method": { - "fullName": 
"google.cloud.compute.v1.MachineImages.SetLabels", + "fullName": "google.cloud.compute.v1.MachineImages.GetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.MachineImages", "shortName": "MachineImages" }, - "shortName": "SetLabels" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsMachineImageRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyMachineImageRequest" }, { "name": "project", @@ -29416,10 +29868,6 @@ "name": "resource", "type": "str" }, - { - "name": "global_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29433,14 +29881,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_machine_images_set_labels_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_machine_images_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineImages_SetLabels_sync", + "regionTag": "compute_v1_generated_MachineImages_GetIamPolicy_sync", "segments": [ { "end": 52, @@ -29473,7 +29921,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_images_set_labels_sync.py" + "title": "compute_v1_generated_machine_images_get_iam_policy_sync.py" }, { "canonical": true, @@ -29482,32 +29930,28 @@ "fullName": "google.cloud.compute_v1.MachineImagesClient", "shortName": "MachineImagesClient" }, - "fullName": "google.cloud.compute_v1.MachineImagesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.MachineImagesClient.get", "method": { - "fullName": 
"google.cloud.compute.v1.MachineImages.TestIamPermissions", + "fullName": "google.cloud.compute.v1.MachineImages.Get", "service": { "fullName": "google.cloud.compute.v1.MachineImages", "shortName": "MachineImages" }, - "shortName": "TestIamPermissions" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsMachineImageRequest" + "type": "google.cloud.compute_v1.types.GetMachineImageRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "machine_image", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29521,14 +29965,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.MachineImage", + "shortName": "get" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_machine_images_test_iam_permissions_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_machine_images_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineImages_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_MachineImages_Get_sync", "segments": [ { "end": 52, @@ -29561,33 +30005,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_images_test_iam_permissions_sync.py" + "title": "compute_v1_generated_machine_images_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.MachineTypesClient", - "shortName": "MachineTypesClient" + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" }, - "fullName": "google.cloud.compute_v1.MachineTypesClient.aggregated_list", + 
"fullName": "google.cloud.compute_v1.MachineImagesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.MachineTypes.AggregatedList", + "fullName": "google.cloud.compute.v1.MachineImages.Insert", "service": { - "fullName": "google.cloud.compute.v1.MachineTypes", - "shortName": "MachineTypes" + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" }, - "shortName": "AggregatedList" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListMachineTypesRequest" + "type": "google.cloud.compute_v1.types.InsertMachineImageRequest" }, { "name": "project", "type": "str" }, + { + "name": "machine_image_resource", + "type": "google.cloud.compute_v1.types.MachineImage" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29601,22 +30049,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.machine_types.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_machine_types_aggregated_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_machine_images_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineTypes_AggregatedList_sync", + "regionTag": "compute_v1_generated_MachineImages_Insert_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -29636,46 +30084,38 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 52, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_types_aggregated_list_sync.py" + "title": "compute_v1_generated_machine_images_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { 
- "fullName": "google.cloud.compute_v1.MachineTypesClient", - "shortName": "MachineTypesClient" + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" }, - "fullName": "google.cloud.compute_v1.MachineTypesClient.get", + "fullName": "google.cloud.compute_v1.MachineImagesClient.list", "method": { - "fullName": "google.cloud.compute.v1.MachineTypes.Get", + "fullName": "google.cloud.compute.v1.MachineImages.List", "service": { - "fullName": "google.cloud.compute.v1.MachineTypes", - "shortName": "MachineTypes" + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" }, - "shortName": "Get" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetMachineTypeRequest" + "type": "google.cloud.compute_v1.types.ListMachineImagesRequest" }, { "name": "project", "type": "str" }, - { - "name": "zone", - "type": "str" - }, - { - "name": "machine_type", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29689,22 +30129,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.MachineType", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.machine_images.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_machine_types_get_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_machine_images_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineTypes_Get_sync", + "regionTag": "compute_v1_generated_MachineImages_List_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -29714,52 +30154,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 
48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_types_get_sync.py" + "title": "compute_v1_generated_machine_images_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.MachineTypesClient", - "shortName": "MachineTypesClient" + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" }, - "fullName": "google.cloud.compute_v1.MachineTypesClient.list", + "fullName": "google.cloud.compute_v1.MachineImagesClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.MachineTypes.List", + "fullName": "google.cloud.compute.v1.MachineImages.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.MachineTypes", - "shortName": "MachineTypes" + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" }, - "shortName": "List" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListMachineTypesRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyMachineImageRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "resource", "type": "str" }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29773,22 +30217,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.machine_types.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for List", - "file": "compute_v1_generated_machine_types_list_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_machine_images_set_iam_policy_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_MachineTypes_List_sync", + "regionTag": "compute_v1_generated_MachineImages_SetIamPolicy_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -29808,38 +30252,46 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_machine_types_list_sync.py" + "title": "compute_v1_generated_machine_images_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", - "shortName": "NetworkAttachmentsClient" + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.aggregated_list", + "fullName": "google.cloud.compute_v1.MachineImagesClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.AggregatedList", + "fullName": "google.cloud.compute.v1.MachineImages.SetLabels", "service": { - "fullName": "google.cloud.compute.v1.NetworkAttachments", - "shortName": "NetworkAttachments" + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" }, - "shortName": "AggregatedList" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListNetworkAttachmentsRequest" + "type": "google.cloud.compute_v1.types.SetLabelsMachineImageRequest" }, { "name": "project", "type": "str" }, + { + "name": "resource", + "type": "str" + }, + { + "name": "global_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetLabelsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -29853,14 +30305,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.services.network_attachments.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_network_attachments_aggregated_list_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_machine_images_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkAttachments_AggregatedList_sync", + "regionTag": "compute_v1_generated_MachineImages_SetLabels_sync", "segments": [ { "end": 52, @@ -29878,55 +30330,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_aggregated_list_sync.py" + "title": "compute_v1_generated_machine_images_set_labels_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", - "shortName": "NetworkAttachmentsClient" + "fullName": "google.cloud.compute_v1.MachineImagesClient", + "shortName": "MachineImagesClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.delete", + "fullName": "google.cloud.compute_v1.MachineImagesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.Delete", + "fullName": "google.cloud.compute.v1.MachineImages.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.NetworkAttachments", - "shortName": "NetworkAttachments" + "fullName": "google.cloud.compute.v1.MachineImages", + "shortName": "MachineImages" }, - "shortName": "Delete" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - 
"type": "google.cloud.compute_v1.types.DeleteNetworkAttachmentRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsMachineImageRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "resource", "type": "str" }, { - "name": "network_attachment", - "type": "str" + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -29941,22 +30393,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_network_attachments_delete_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_machine_images_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkAttachments_Delete_sync", + "regionTag": "compute_v1_generated_MachineImages_TestIamPermissions_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -29966,56 +30418,48 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_delete_sync.py" + "title": "compute_v1_generated_machine_images_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", - "shortName": "NetworkAttachmentsClient" + "fullName": "google.cloud.compute_v1.MachineTypesClient", + "shortName": 
"MachineTypesClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.MachineTypesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.GetIamPolicy", + "fullName": "google.cloud.compute.v1.MachineTypes.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.NetworkAttachments", - "shortName": "NetworkAttachments" + "fullName": "google.cloud.compute.v1.MachineTypes", + "shortName": "MachineTypes" }, - "shortName": "GetIamPolicy" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyNetworkAttachmentRequest" + "type": "google.cloud.compute_v1.types.AggregatedListMachineTypesRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, - { - "name": "resource", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30029,22 +30473,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.cloud.compute_v1.services.machine_types.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_network_attachments_get_iam_policy_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_machine_types_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkAttachments_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_MachineTypes_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -30054,54 +30498,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, 
- "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_get_iam_policy_sync.py" + "title": "compute_v1_generated_machine_types_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", - "shortName": "NetworkAttachmentsClient" + "fullName": "google.cloud.compute_v1.MachineTypesClient", + "shortName": "MachineTypesClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.get", + "fullName": "google.cloud.compute_v1.MachineTypesClient.get", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.Get", + "fullName": "google.cloud.compute.v1.MachineTypes.Get", "service": { - "fullName": "google.cloud.compute.v1.NetworkAttachments", - "shortName": "NetworkAttachments" + "fullName": "google.cloud.compute.v1.MachineTypes", + "shortName": "MachineTypes" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNetworkAttachmentRequest" + "type": "google.cloud.compute_v1.types.GetMachineTypeRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "zone", "type": "str" }, { - "name": "network_attachment", + "name": "machine_type", "type": "str" }, { @@ -30117,14 +30561,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NetworkAttachment", + "resultType": "google.cloud.compute_v1.types.MachineType", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_network_attachments_get_sync.py", + "file": "compute_v1_generated_machine_types_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkAttachments_Get_sync", + "regionTag": "compute_v1_generated_MachineTypes_Get_sync", "segments": [ { "end": 
53, @@ -30157,41 +30601,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_get_sync.py" + "title": "compute_v1_generated_machine_types_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", - "shortName": "NetworkAttachmentsClient" + "fullName": "google.cloud.compute_v1.MachineTypesClient", + "shortName": "MachineTypesClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.insert", + "fullName": "google.cloud.compute_v1.MachineTypesClient.list", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.Insert", + "fullName": "google.cloud.compute.v1.MachineTypes.List", "service": { - "fullName": "google.cloud.compute.v1.NetworkAttachments", - "shortName": "NetworkAttachments" + "fullName": "google.cloud.compute.v1.MachineTypes", + "shortName": "MachineTypes" }, - "shortName": "Insert" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertNetworkAttachmentRequest" + "type": "google.cloud.compute_v1.types.ListMachineTypesRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "zone", "type": "str" }, - { - "name": "network_attachment_resource", - "type": "google.cloud.compute_v1.types.NetworkAttachment" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30205,22 +30645,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.services.machine_types.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_network_attachments_insert_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_machine_types_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_NetworkAttachments_Insert_sync", + "regionTag": "compute_v1_generated_MachineTypes_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -30240,12 +30680,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_insert_sync.py" + "title": "compute_v1_generated_machine_types_list_sync.py" }, { "canonical": true, @@ -30254,28 +30694,24 @@ "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.list", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.List", + "fullName": "google.cloud.compute.v1.NetworkAttachments.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.NetworkAttachments", "shortName": "NetworkAttachments" }, - "shortName": "List" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNetworkAttachmentsRequest" + "type": "google.cloud.compute_v1.types.AggregatedListNetworkAttachmentsRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30289,22 +30725,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.network_attachments.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.services.network_attachments.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for List", - "file": "compute_v1_generated_network_attachments_list_sync.py", + "description": "Sample for AggregatedList", + "file": 
"compute_v1_generated_network_attachments_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkAttachments_List_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -30314,22 +30750,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_list_sync.py" + "title": "compute_v1_generated_network_attachments_aggregated_list_sync.py" }, { "canonical": true, @@ -30338,19 +30774,19 @@ "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.patch", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.Patch", + "fullName": "google.cloud.compute.v1.NetworkAttachments.Delete", "service": { "fullName": "google.cloud.compute.v1.NetworkAttachments", "shortName": "NetworkAttachments" }, - "shortName": "Patch" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchNetworkAttachmentRequest" + "type": "google.cloud.compute_v1.types.DeleteNetworkAttachmentRequest" }, { "name": "project", @@ -30364,10 +30800,6 @@ "name": "network_attachment", "type": "str" }, - { - "name": "network_attachment_resource", - "type": "google.cloud.compute_v1.types.NetworkAttachment" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30382,13 +30814,13 @@ } ], "resultType": 
"google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "delete" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_network_attachments_patch_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_network_attachments_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkAttachments_Patch_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_Delete_sync", "segments": [ { "end": 53, @@ -30421,7 +30853,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_patch_sync.py" + "title": "compute_v1_generated_network_attachments_delete_sync.py" }, { "canonical": true, @@ -30430,19 +30862,19 @@ "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.SetIamPolicy", + "fullName": "google.cloud.compute.v1.NetworkAttachments.GetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.NetworkAttachments", "shortName": "NetworkAttachments" }, - "shortName": "SetIamPolicy" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyNetworkAttachmentRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyNetworkAttachmentRequest" }, { "name": "project", @@ -30456,10 +30888,6 @@ "name": "resource", "type": "str" }, - { - "name": "region_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30474,13 +30902,13 @@ } ], "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "shortName": "get_iam_policy" }, - 
"description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_network_attachments_set_iam_policy_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_network_attachments_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkAttachments_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -30513,7 +30941,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_set_iam_policy_sync.py" + "title": "compute_v1_generated_network_attachments_get_iam_policy_sync.py" }, { "canonical": true, @@ -30522,19 +30950,19 @@ "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.get", "method": { - "fullName": "google.cloud.compute.v1.NetworkAttachments.TestIamPermissions", + "fullName": "google.cloud.compute.v1.NetworkAttachments.Get", "service": { "fullName": "google.cloud.compute.v1.NetworkAttachments", "shortName": "NetworkAttachments" }, - "shortName": "TestIamPermissions" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkAttachmentRequest" + "type": "google.cloud.compute_v1.types.GetNetworkAttachmentRequest" }, { "name": "project", @@ -30545,13 +30973,9 @@ "type": "str" }, { - "name": "resource", + "name": "network_attachment", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30565,14 +30989,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": 
"test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.NetworkAttachment", + "shortName": "get" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_network_attachments_test_iam_permissions_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_network_attachments_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkAttachments_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_Get_sync", "segments": [ { "end": 53, @@ -30605,33 +31029,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_attachments_test_iam_permissions_sync.py" + "title": "compute_v1_generated_network_attachments_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", - "shortName": "NetworkEdgeSecurityServicesClient" + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.AggregatedList", + "fullName": "google.cloud.compute.v1.NetworkAttachments.Insert", "service": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", - "shortName": "NetworkEdgeSecurityServices" + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" }, - "shortName": "AggregatedList" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListNetworkEdgeSecurityServicesRequest" + "type": "google.cloud.compute_v1.types.InsertNetworkAttachmentRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": 
"network_attachment_resource", + "type": "google.cloud.compute_v1.types.NetworkAttachment" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30645,14 +31077,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.network_edge_security_services.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_network_edge_security_services_aggregated_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_network_attachments_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_AggregatedList_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_Insert_sync", "segments": [ { "end": 52, @@ -30670,43 +31102,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_edge_security_services_aggregated_list_sync.py" + "title": "compute_v1_generated_network_attachments_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", - "shortName": "NetworkEdgeSecurityServicesClient" + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.delete", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.list", "method": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Delete", + "fullName": 
"google.cloud.compute.v1.NetworkAttachments.List", "service": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", - "shortName": "NetworkEdgeSecurityServices" + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" }, - "shortName": "Delete" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteNetworkEdgeSecurityServiceRequest" + "type": "google.cloud.compute_v1.types.ListNetworkAttachmentsRequest" }, { "name": "project", @@ -30716,10 +31148,6 @@ "name": "region", "type": "str" }, - { - "name": "network_edge_security_service", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30733,14 +31161,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.network_attachments.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_network_edge_security_services_delete_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_network_attachments_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Delete_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_List_sync", "segments": [ { "end": 53, @@ -30758,43 +31186,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_edge_security_services_delete_sync.py" + "title": "compute_v1_generated_network_attachments_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", - "shortName": "NetworkEdgeSecurityServicesClient" + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.get", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.patch", "method": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Get", + "fullName": "google.cloud.compute.v1.NetworkAttachments.Patch", "service": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", - "shortName": "NetworkEdgeSecurityServices" + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" }, - "shortName": "Get" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNetworkEdgeSecurityServiceRequest" + "type": "google.cloud.compute_v1.types.PatchNetworkAttachmentRequest" }, { "name": "project", @@ -30805,9 +31233,13 @@ "type": "str" }, { - "name": "network_edge_security_service", + "name": "network_attachment", "type": "str" }, + { + "name": "network_attachment_resource", + "type": "google.cloud.compute_v1.types.NetworkAttachment" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -30821,14 +31253,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NetworkEdgeSecurityService", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for Get", - "file": "compute_v1_generated_network_edge_security_services_get_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_network_attachments_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Get_sync", + "regionTag": 
"compute_v1_generated_NetworkAttachments_Patch_sync", "segments": [ { "end": 53, @@ -30861,28 +31293,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_edge_security_services_get_sync.py" + "title": "compute_v1_generated_network_attachments_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", - "shortName": "NetworkEdgeSecurityServicesClient" + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.insert", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Insert", + "fullName": "google.cloud.compute.v1.NetworkAttachments.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", - "shortName": "NetworkEdgeSecurityServices" + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" }, - "shortName": "Insert" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertNetworkEdgeSecurityServiceRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyNetworkAttachmentRequest" }, { "name": "project", @@ -30893,8 +31325,12 @@ "type": "str" }, { - "name": "network_edge_security_service_resource", - "type": "google.cloud.compute_v1.types.NetworkEdgeSecurityService" + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" }, { "name": "retry", @@ -30909,22 +31345,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": 
"set_iam_policy" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_network_edge_security_services_insert_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_network_attachments_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Insert_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_SetIamPolicy_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -30934,43 +31370,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_edge_security_services_insert_sync.py" + "title": "compute_v1_generated_network_attachments_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", - "shortName": "NetworkEdgeSecurityServicesClient" + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient", + "shortName": "NetworkAttachmentsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.patch", + "fullName": "google.cloud.compute_v1.NetworkAttachmentsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Patch", + "fullName": "google.cloud.compute.v1.NetworkAttachments.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", - "shortName": "NetworkEdgeSecurityServices" + "fullName": "google.cloud.compute.v1.NetworkAttachments", + "shortName": "NetworkAttachments" }, - "shortName": "Patch" + "shortName": "TestIamPermissions" }, "parameters": 
[ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchNetworkEdgeSecurityServiceRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkAttachmentRequest" }, { "name": "project", @@ -30981,12 +31417,12 @@ "type": "str" }, { - "name": "network_edge_security_service", + "name": "resource", "type": "str" }, { - "name": "network_edge_security_service_resource", - "type": "google.cloud.compute_v1.types.NetworkEdgeSecurityService" + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -31001,14 +31437,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_network_edge_security_services_patch_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_network_attachments_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Patch_sync", + "regionTag": "compute_v1_generated_NetworkAttachments_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -31041,28 +31477,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_edge_security_services_patch_sync.py" + "title": "compute_v1_generated_network_attachments_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", - "shortName": "NetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.aggregated_list", + "fullName": 
"google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.AggregatedList", + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", - "shortName": "NetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" }, "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListNetworkEndpointGroupsRequest" + "type": "google.cloud.compute_v1.types.AggregatedListNetworkEdgeSecurityServicesRequest" }, { "name": "project", @@ -31081,14 +31517,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.AggregatedListPager", + "resultType": "google.cloud.compute_v1.services.network_edge_security_services.pagers.AggregatedListPager", "shortName": "aggregated_list" }, "description": "Sample for AggregatedList", - "file": "compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py", + "file": "compute_v1_generated_network_edge_security_services_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_AggregatedList_sync", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_AggregatedList_sync", "segments": [ { "end": 52, @@ -31121,45 +31557,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py" + "title": "compute_v1_generated_network_edge_security_services_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", - "shortName": "NetworkEndpointGroupsClient" + "fullName": 
"google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.attach_network_endpoints", + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.AttachNetworkEndpoints", + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Delete", "service": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", - "shortName": "NetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" }, - "shortName": "AttachNetworkEndpoints" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AttachNetworkEndpointsNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.DeleteNetworkEdgeSecurityServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "region", "type": "str" }, { - "name": "network_endpoint_group", + "name": "network_edge_security_service", "type": "str" }, - { - "name": "network_endpoint_groups_attach_endpoints_request_resource", - "type": "google.cloud.compute_v1.types.NetworkEndpointGroupsAttachEndpointsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -31174,13 +31606,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "attach_network_endpoints" + "shortName": "delete" }, - "description": "Sample for AttachNetworkEndpoints", - "file": "compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_network_edge_security_services_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_AttachNetworkEndpoints_sync", + "regionTag": 
"compute_v1_generated_NetworkEdgeSecurityServices_Delete_sync", "segments": [ { "end": 53, @@ -31213,39 +31645,39 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py" + "title": "compute_v1_generated_network_edge_security_services_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", - "shortName": "NetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.delete", + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.get", "method": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.Delete", + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Get", "service": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", - "shortName": "NetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" }, - "shortName": "Delete" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.GetNetworkEdgeSecurityServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "region", "type": "str" }, { - "name": "network_endpoint_group", + "name": "network_edge_security_service", "type": "str" }, { @@ -31261,14 +31693,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.NetworkEdgeSecurityService", + "shortName": "get" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_network_endpoint_groups_delete_sync.py", + 
"description": "Sample for Get", + "file": "compute_v1_generated_network_edge_security_services_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_Delete_sync", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Get_sync", "segments": [ { "end": 53, @@ -31301,44 +31733,40 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_delete_sync.py" + "title": "compute_v1_generated_network_edge_security_services_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", - "shortName": "NetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.detach_network_endpoints", + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.DetachNetworkEndpoints", + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Insert", "service": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", - "shortName": "NetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" }, - "shortName": "DetachNetworkEndpoints" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DetachNetworkEndpointsNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.InsertNetworkEdgeSecurityServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", - "type": "str" - }, - { - "name": "network_endpoint_group", + "name": "region", "type": "str" }, { - "name": "network_endpoint_groups_detach_endpoints_request_resource", - "type": 
"google.cloud.compute_v1.types.NetworkEndpointGroupsDetachEndpointsRequest" + "name": "network_edge_security_service_resource", + "type": "google.cloud.compute_v1.types.NetworkEdgeSecurityService" }, { "name": "retry", @@ -31354,21 +31782,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "detach_network_endpoints" + "shortName": "insert" }, - "description": "Sample for DetachNetworkEndpoints", - "file": "compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_network_edge_security_services_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_DetachNetworkEndpoints_sync", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -31378,56 +31806,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py" + "title": "compute_v1_generated_network_edge_security_services_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", - "shortName": "NetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient", + "shortName": "NetworkEdgeSecurityServicesClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.get", + "fullName": "google.cloud.compute_v1.NetworkEdgeSecurityServicesClient.patch", "method": { - "fullName": 
"google.cloud.compute.v1.NetworkEndpointGroups.Get", + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices.Patch", "service": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", - "shortName": "NetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.NetworkEdgeSecurityServices", + "shortName": "NetworkEdgeSecurityServices" }, - "shortName": "Get" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.PatchNetworkEdgeSecurityServiceRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "region", "type": "str" }, { - "name": "network_endpoint_group", + "name": "network_edge_security_service", "type": "str" }, + { + "name": "network_edge_security_service_resource", + "type": "google.cloud.compute_v1.types.NetworkEdgeSecurityService" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -31441,14 +31873,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NetworkEndpointGroup", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for Get", - "file": "compute_v1_generated_network_endpoint_groups_get_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_network_edge_security_services_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_Get_sync", + "regionTag": "compute_v1_generated_NetworkEdgeSecurityServices_Patch_sync", "segments": [ { "end": 53, @@ -31481,7 +31913,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_get_sync.py" + "title": "compute_v1_generated_network_edge_security_services_patch_sync.py" }, { "canonical": true, @@ -31490,32 +31922,24 @@ "fullName": 
"google.cloud.compute_v1.NetworkEndpointGroupsClient", "shortName": "NetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.insert", + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.Insert", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", "shortName": "NetworkEndpointGroups" }, - "shortName": "Insert" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.AggregatedListNetworkEndpointGroupsRequest" }, { "name": "project", "type": "str" }, - { - "name": "zone", - "type": "str" - }, - { - "name": "network_endpoint_group_resource", - "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -31529,14 +31953,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_network_endpoint_groups_insert_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_Insert_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_AggregatedList_sync", "segments": [ { "end": 52, @@ -31554,22 +31978,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - 
"start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_insert_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_aggregated_list_sync.py" }, { "canonical": true, @@ -31578,19 +32002,19 @@ "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", "shortName": "NetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.list_network_endpoints", + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.attach_network_endpoints", "method": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.ListNetworkEndpoints", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.AttachNetworkEndpoints", "service": { "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", "shortName": "NetworkEndpointGroups" }, - "shortName": "ListNetworkEndpoints" + "shortName": "AttachNetworkEndpoints" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNetworkEndpointsNetworkEndpointGroupsRequest" + "type": "google.cloud.compute_v1.types.AttachNetworkEndpointsNetworkEndpointGroupRequest" }, { "name": "project", @@ -31605,8 +32029,8 @@ "type": "str" }, { - "name": "network_endpoint_groups_list_endpoints_request_resource", - "type": "google.cloud.compute_v1.types.NetworkEndpointGroupsListEndpointsRequest" + "name": "network_endpoint_groups_attach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroupsAttachEndpointsRequest" }, { "name": "retry", @@ -31621,22 +32045,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.ListNetworkEndpointsPager", - "shortName": "list_network_endpoints" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "attach_network_endpoints" }, - 
"description": "Sample for ListNetworkEndpoints", - "file": "compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py", + "description": "Sample for AttachNetworkEndpoints", + "file": "compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_ListNetworkEndpoints_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_AttachNetworkEndpoints_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -31656,12 +32080,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 54, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_attach_network_endpoints_sync.py" }, { "canonical": true, @@ -31670,19 +32094,19 @@ "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", "shortName": "NetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.list", + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.List", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.Delete", "service": { "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", "shortName": "NetworkEndpointGroups" }, - "shortName": "List" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNetworkEndpointGroupsRequest" + "type": "google.cloud.compute_v1.types.DeleteNetworkEndpointGroupRequest" }, { "name": "project", @@ -31692,6 +32116,10 @@ "name": "zone", "type": "str" }, + { + "name": "network_endpoint_group", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -31705,14 +32133,14 @@ 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for List", - "file": "compute_v1_generated_network_endpoint_groups_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_network_endpoint_groups_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_List_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_Delete_sync", "segments": [ { "end": 53, @@ -31730,22 +32158,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_list_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_delete_sync.py" }, { "canonical": true, @@ -31754,19 +32182,19 @@ "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", "shortName": "NetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.detach_network_endpoints", "method": { - "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.TestIamPermissions", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.DetachNetworkEndpoints", "service": { "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", "shortName": "NetworkEndpointGroups" }, - "shortName": "TestIamPermissions" + "shortName": "DetachNetworkEndpoints" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkEndpointGroupRequest" 
+ "type": "google.cloud.compute_v1.types.DetachNetworkEndpointsNetworkEndpointGroupRequest" }, { "name": "project", @@ -31777,12 +32205,12 @@ "type": "str" }, { - "name": "resource", + "name": "network_endpoint_group", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "network_endpoint_groups_detach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroupsDetachEndpointsRequest" }, { "name": "retry", @@ -31797,14 +32225,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "detach_network_endpoints" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py", + "description": "Sample for DetachNetworkEndpoints", + "file": "compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkEndpointGroups_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_DetachNetworkEndpoints_sync", "segments": [ { "end": 53, @@ -31837,40 +32265,40 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_detach_network_endpoints_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", - "shortName": "NetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.add_association", + "fullName": 
"google.cloud.compute_v1.NetworkEndpointGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AddAssociation", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.Get", "service": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", - "shortName": "NetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" }, - "shortName": "AddAssociation" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddAssociationNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetNetworkEndpointGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "firewall_policy", + "name": "zone", "type": "str" }, { - "name": "firewall_policy_association_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicyAssociation" + "name": "network_endpoint_group", + "type": "str" }, { "name": "retry", @@ -31885,22 +32313,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_association" + "resultType": "google.cloud.compute_v1.types.NetworkEndpointGroup", + "shortName": "get" }, - "description": "Sample for AddAssociation", - "file": "compute_v1_generated_network_firewall_policies_add_association_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_network_endpoint_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AddAssociation_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -31910,55 +32338,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - 
"start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_add_association_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", - "shortName": "NetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.add_packet_mirroring_rule", + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AddPacketMirroringRule", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.Insert", "service": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", - "shortName": "NetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" }, - "shortName": "AddPacketMirroringRule" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddPacketMirroringRuleNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.InsertNetworkEndpointGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "firewall_policy", + "name": "zone", "type": "str" }, { - "name": "firewall_policy_rule_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + "name": "network_endpoint_group_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" }, { "name": "retry", @@ -31974,13 +32402,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_packet_mirroring_rule" + "shortName": "insert" }, - "description": "Sample for 
AddPacketMirroringRule", - "file": "compute_v1_generated_network_firewall_policies_add_packet_mirroring_rule_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_network_endpoint_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AddPacketMirroringRule_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_Insert_sync", "segments": [ { "end": 52, @@ -32013,40 +32441,44 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_add_packet_mirroring_rule_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", - "shortName": "NetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.add_rule", + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.list_network_endpoints", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AddRule", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.ListNetworkEndpoints", "service": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", - "shortName": "NetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" }, - "shortName": "AddRule" + "shortName": "ListNetworkEndpoints" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddRuleNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.ListNetworkEndpointsNetworkEndpointGroupsRequest" }, { "name": "project", "type": "str" }, { - "name": "firewall_policy", + "name": "zone", "type": "str" }, { - "name": "firewall_policy_rule_resource", - "type": 
"google.cloud.compute_v1.types.FirewallPolicyRule" + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "network_endpoint_groups_list_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroupsListEndpointsRequest" }, { "name": "retry", @@ -32061,22 +32493,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_rule" + "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.ListNetworkEndpointsPager", + "shortName": "list_network_endpoints" }, - "description": "Sample for AddRule", - "file": "compute_v1_generated_network_firewall_policies_add_rule_sync.py", + "description": "Sample for ListNetworkEndpoints", + "file": "compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AddRule_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_ListNetworkEndpoints_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -32086,48 +32518,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 55, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_add_rule_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_list_network_endpoints_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", - "shortName": "NetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" }, - 
"fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AggregatedList", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.List", "service": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", - "shortName": "NetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" }, - "shortName": "AggregatedList" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListNetworkFirewallPoliciesRequest" + "type": "google.cloud.compute_v1.types.ListNetworkEndpointGroupsRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32141,22 +32577,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.network_firewall_policies.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.services.network_endpoint_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_network_firewall_policies_aggregated_list_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_network_endpoint_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AggregatedList_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -32166,52 +32602,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 
48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_aggregated_list_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", - "shortName": "NetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient", + "shortName": "NetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.clone_rules", + "fullName": "google.cloud.compute_v1.NetworkEndpointGroupsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.CloneRules", + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", - "shortName": "NetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.NetworkEndpointGroups", + "shortName": "NetworkEndpointGroups" }, - "shortName": "CloneRules" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.CloneRulesNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkEndpointGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "firewall_policy", + "name": "zone", "type": "str" }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32225,22 +32669,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "clone_rules" + "resultType": 
"google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for CloneRules", - "file": "compute_v1_generated_network_firewall_policies_clone_rules_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_CloneRules_sync", + "regionTag": "compute_v1_generated_NetworkEndpointGroups_TestIamPermissions_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -32250,22 +32694,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_clone_rules_sync.py" + "title": "compute_v1_generated_network_endpoint_groups_test_iam_permissions_sync.py" }, { "canonical": true, @@ -32274,19 +32718,19 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.delete", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.add_association", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Delete", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AddAssociation", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "Delete" + "shortName": "AddAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteNetworkFirewallPolicyRequest" + "type": 
"google.cloud.compute_v1.types.AddAssociationNetworkFirewallPolicyRequest" }, { "name": "project", @@ -32296,6 +32740,10 @@ "name": "firewall_policy", "type": "str" }, + { + "name": "firewall_policy_association_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyAssociation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32310,13 +32758,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "add_association" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_network_firewall_policies_delete_sync.py", + "description": "Sample for AddAssociation", + "file": "compute_v1_generated_network_firewall_policies_add_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Delete_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AddAssociation_sync", "segments": [ { "end": 52, @@ -32349,7 +32797,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_delete_sync.py" + "title": "compute_v1_generated_network_firewall_policies_add_association_sync.py" }, { "canonical": true, @@ -32358,19 +32806,19 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_association", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.add_packet_mirroring_rule", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetAssociation", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AddPacketMirroringRule", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "GetAssociation" + "shortName": "AddPacketMirroringRule" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.GetAssociationNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.AddPacketMirroringRuleNetworkFirewallPolicyRequest" }, { "name": "project", @@ -32380,6 +32828,10 @@ "name": "firewall_policy", "type": "str" }, + { + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32393,14 +32845,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicyAssociation", - "shortName": "get_association" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_packet_mirroring_rule" }, - "description": "Sample for GetAssociation", - "file": "compute_v1_generated_network_firewall_policies_get_association_sync.py", + "description": "Sample for AddPacketMirroringRule", + "file": "compute_v1_generated_network_firewall_policies_add_packet_mirroring_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetAssociation_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AddPacketMirroringRule_sync", "segments": [ { "end": 52, @@ -32433,7 +32885,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_get_association_sync.py" + "title": "compute_v1_generated_network_firewall_policies_add_packet_mirroring_rule_sync.py" }, { "canonical": true, @@ -32442,28 +32894,32 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.add_rule", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetIamPolicy", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AddRule", 
"service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "GetIamPolicy" + "shortName": "AddRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.AddRuleNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "firewall_policy", "type": "str" }, + { + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32477,14 +32933,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_rule" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py", + "description": "Sample for AddRule", + "file": "compute_v1_generated_network_firewall_policies_add_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AddRule_sync", "segments": [ { "end": 52, @@ -32517,7 +32973,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py" + "title": "compute_v1_generated_network_firewall_policies_add_rule_sync.py" }, { "canonical": true, @@ -32526,28 +32982,24 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_packet_mirroring_rule", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.aggregated_list", "method": { - "fullName": 
"google.cloud.compute.v1.NetworkFirewallPolicies.GetPacketMirroringRule", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "GetPacketMirroringRule" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetPacketMirroringRuleNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.AggregatedListNetworkFirewallPoliciesRequest" }, { "name": "project", "type": "str" }, - { - "name": "firewall_policy", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32561,14 +33013,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", - "shortName": "get_packet_mirroring_rule" + "resultType": "google.cloud.compute_v1.services.network_firewall_policies.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for GetPacketMirroringRule", - "file": "compute_v1_generated_network_firewall_policies_get_packet_mirroring_rule_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_network_firewall_policies_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetPacketMirroringRule_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_AggregatedList_sync", "segments": [ { "end": 52, @@ -32586,22 +33038,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_get_packet_mirroring_rule_sync.py" + "title": 
"compute_v1_generated_network_firewall_policies_aggregated_list_sync.py" }, { "canonical": true, @@ -32610,19 +33062,19 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_rule", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.clone_rules", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetRule", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.CloneRules", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "GetRule" + "shortName": "CloneRules" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRuleNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.CloneRulesNetworkFirewallPolicyRequest" }, { "name": "project", @@ -32645,14 +33097,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", - "shortName": "get_rule" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "clone_rules" }, - "description": "Sample for GetRule", - "file": "compute_v1_generated_network_firewall_policies_get_rule_sync.py", + "description": "Sample for CloneRules", + "file": "compute_v1_generated_network_firewall_policies_clone_rules_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetRule_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_CloneRules_sync", "segments": [ { "end": 52, @@ -32685,7 +33137,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_get_rule_sync.py" + "title": "compute_v1_generated_network_firewall_policies_clone_rules_sync.py" }, { "canonical": true, @@ -32694,19 +33146,19 @@ "fullName": 
"google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Get", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Delete", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "Get" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.DeleteNetworkFirewallPolicyRequest" }, { "name": "project", @@ -32729,14 +33181,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicy", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for Get", - "file": "compute_v1_generated_network_firewall_policies_get_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_network_firewall_policies_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Get_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Delete_sync", "segments": [ { "end": 52, @@ -32769,7 +33221,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_get_sync.py" + "title": "compute_v1_generated_network_firewall_policies_delete_sync.py" }, { "canonical": true, @@ -32778,106 +33230,26 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.insert", + "fullName": 
"google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_association", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Insert", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetAssociation", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "Insert" + "shortName": "GetAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetAssociationNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "firewall_policy_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicy" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" - }, - "description": "Sample for Insert", - "file": "compute_v1_generated_network_firewall_policies_insert_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Insert_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_network_firewall_policies_insert_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", - "shortName": "NetworkFirewallPoliciesClient" - }, - "fullName": 
"google.cloud.compute_v1.NetworkFirewallPoliciesClient.list", - "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.List", - "service": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", - "shortName": "NetworkFirewallPolicies" - }, - "shortName": "List" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.ListNetworkFirewallPoliciesRequest" - }, - { - "name": "project", + "name": "firewall_policy", "type": "str" }, { @@ -32893,14 +33265,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.network_firewall_policies.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.FirewallPolicyAssociation", + "shortName": "get_association" }, - "description": "Sample for List", - "file": "compute_v1_generated_network_firewall_policies_list_sync.py", + "description": "Sample for GetAssociation", + "file": "compute_v1_generated_network_firewall_policies_get_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_List_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetAssociation_sync", "segments": [ { "end": 52, @@ -32918,22 +33290,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_list_sync.py" + "title": "compute_v1_generated_network_firewall_policies_get_association_sync.py" }, { "canonical": true, @@ -32942,32 +33314,28 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": 
"google.cloud.compute_v1.NetworkFirewallPoliciesClient.patch_packet_mirroring_rule", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.PatchPacketMirroringRule", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "PatchPacketMirroringRule" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchPacketMirroringRuleNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "firewall_policy", + "name": "resource", "type": "str" }, - { - "name": "firewall_policy_rule_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicyRule" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -32981,14 +33349,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch_packet_mirroring_rule" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for PatchPacketMirroringRule", - "file": "compute_v1_generated_network_firewall_policies_patch_packet_mirroring_rule_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_PatchPacketMirroringRule_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetIamPolicy_sync", "segments": [ { "end": 52, @@ -33021,7 +33389,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_network_firewall_policies_patch_packet_mirroring_rule_sync.py" + "title": "compute_v1_generated_network_firewall_policies_get_iam_policy_sync.py" }, { "canonical": true, @@ -33030,19 +33398,19 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.patch_rule", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_packet_mirroring_rule", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.PatchRule", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetPacketMirroringRule", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "PatchRule" + "shortName": "GetPacketMirroringRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRuleNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetPacketMirroringRuleNetworkFirewallPolicyRequest" }, { "name": "project", @@ -33052,10 +33420,6 @@ "name": "firewall_policy", "type": "str" }, - { - "name": "firewall_policy_rule_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicyRule" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -33069,14 +33433,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch_rule" + "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", + "shortName": "get_packet_mirroring_rule" }, - "description": "Sample for PatchRule", - "file": "compute_v1_generated_network_firewall_policies_patch_rule_sync.py", + "description": "Sample for GetPacketMirroringRule", + "file": "compute_v1_generated_network_firewall_policies_get_packet_mirroring_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_NetworkFirewallPolicies_PatchRule_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetPacketMirroringRule_sync", "segments": [ { "end": 52, @@ -33109,7 +33473,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_patch_rule_sync.py" + "title": "compute_v1_generated_network_firewall_policies_get_packet_mirroring_rule_sync.py" }, { "canonical": true, @@ -33118,19 +33482,19 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.patch", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get_rule", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Patch", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.GetRule", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "Patch" + "shortName": "GetRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRuleNetworkFirewallPolicyRequest" }, { "name": "project", @@ -33140,10 +33504,6 @@ "name": "firewall_policy", "type": "str" }, - { - "name": "firewall_policy_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicy" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -33157,14 +33517,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", + "shortName": "get_rule" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_network_firewall_policies_patch_sync.py", + "description": "Sample for GetRule", + "file": 
"compute_v1_generated_network_firewall_policies_get_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Patch_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_GetRule_sync", "segments": [ { "end": 52, @@ -33197,7 +33557,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_patch_sync.py" + "title": "compute_v1_generated_network_firewall_policies_get_rule_sync.py" }, { "canonical": true, @@ -33206,19 +33566,19 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.remove_association", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.get", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.RemoveAssociation", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Get", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "RemoveAssociation" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveAssociationNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetNetworkFirewallPolicyRequest" }, { "name": "project", @@ -33241,14 +33601,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_association" + "resultType": "google.cloud.compute_v1.types.FirewallPolicy", + "shortName": "get" }, - "description": "Sample for RemoveAssociation", - "file": "compute_v1_generated_network_firewall_policies_remove_association_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_network_firewall_policies_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_NetworkFirewallPolicies_RemoveAssociation_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Get_sync", "segments": [ { "end": 52, @@ -33281,7 +33641,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_remove_association_sync.py" + "title": "compute_v1_generated_network_firewall_policies_get_sync.py" }, { "canonical": true, @@ -33290,27 +33650,27 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.remove_packet_mirroring_rule", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.RemovePacketMirroringRule", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Insert", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "RemovePacketMirroringRule" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemovePacketMirroringRuleNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.InsertNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "firewall_policy", - "type": "str" + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" }, { "name": "retry", @@ -33326,13 +33686,93 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_packet_mirroring_rule" + "shortName": "insert" }, - "description": "Sample for RemovePacketMirroringRule", - "file": "compute_v1_generated_network_firewall_policies_remove_packet_mirroring_rule_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_network_firewall_policies_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", 
- "regionTag": "compute_v1_generated_NetworkFirewallPolicies_RemovePacketMirroringRule_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Insert_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_network_firewall_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListNetworkFirewallPoliciesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.network_firewall_policies.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_network_firewall_policies_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_List_sync", "segments": [ { "end": 52, @@ -33350,22 +33790,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, 
"start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_remove_packet_mirroring_rule_sync.py" + "title": "compute_v1_generated_network_firewall_policies_list_sync.py" }, { "canonical": true, @@ -33374,19 +33814,19 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.remove_rule", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.patch_packet_mirroring_rule", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.RemoveRule", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.PatchPacketMirroringRule", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "RemoveRule" + "shortName": "PatchPacketMirroringRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveRuleNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.PatchPacketMirroringRuleNetworkFirewallPolicyRequest" }, { "name": "project", @@ -33396,6 +33836,10 @@ "name": "firewall_policy", "type": "str" }, + { + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -33410,13 +33854,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_rule" + "shortName": "patch_packet_mirroring_rule" }, - "description": "Sample for RemoveRule", - "file": "compute_v1_generated_network_firewall_policies_remove_rule_sync.py", + "description": "Sample for PatchPacketMirroringRule", + "file": 
"compute_v1_generated_network_firewall_policies_patch_packet_mirroring_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_RemoveRule_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_PatchPacketMirroringRule_sync", "segments": [ { "end": 52, @@ -33449,7 +33893,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_remove_rule_sync.py" + "title": "compute_v1_generated_network_firewall_policies_patch_packet_mirroring_rule_sync.py" }, { "canonical": true, @@ -33458,31 +33902,31 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.patch_rule", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.SetIamPolicy", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.PatchRule", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "SetIamPolicy" + "shortName": "PatchRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.PatchRuleNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "firewall_policy", "type": "str" }, { - "name": "global_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" }, { "name": "retry", @@ -33497,14 +33941,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_rule" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py", + "description": "Sample for PatchRule", + "file": "compute_v1_generated_network_firewall_policies_patch_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_PatchRule_sync", "segments": [ { "end": 52, @@ -33537,7 +33981,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py" + "title": "compute_v1_generated_network_firewall_policies_patch_rule_sync.py" }, { "canonical": true, @@ -33546,31 +33990,31 @@ "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.TestIamPermissions", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.Patch", "service": { "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", "shortName": "NetworkFirewallPolicies" }, - "shortName": "TestIamPermissions" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.PatchNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "resource", + "name": "firewall_policy", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" }, { 
"name": "retry", @@ -33585,14 +34029,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_network_firewall_policies_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkFirewallPolicies_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_Patch_sync", "segments": [ { "end": 52, @@ -33625,35 +34069,35 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py" + "title": "compute_v1_generated_network_firewall_policies_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkProfilesClient", - "shortName": "NetworkProfilesClient" + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkProfilesClient.get", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.remove_association", "method": { - "fullName": "google.cloud.compute.v1.NetworkProfiles.Get", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.RemoveAssociation", "service": { - "fullName": "google.cloud.compute.v1.NetworkProfiles", - "shortName": "NetworkProfiles" + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" }, - "shortName": "Get" + "shortName": "RemoveAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNetworkProfileRequest" + "type": 
"google.cloud.compute_v1.types.RemoveAssociationNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "network_profile", + "name": "firewall_policy", "type": "str" }, { @@ -33669,14 +34113,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NetworkProfile", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_association" }, - "description": "Sample for Get", - "file": "compute_v1_generated_network_profiles_get_sync.py", + "description": "Sample for RemoveAssociation", + "file": "compute_v1_generated_network_firewall_policies_remove_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkProfiles_Get_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_RemoveAssociation_sync", "segments": [ { "end": 52, @@ -33709,33 +34153,37 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_profiles_get_sync.py" + "title": "compute_v1_generated_network_firewall_policies_remove_association_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworkProfilesClient", - "shortName": "NetworkProfilesClient" + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworkProfilesClient.list", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.remove_packet_mirroring_rule", "method": { - "fullName": "google.cloud.compute.v1.NetworkProfiles.List", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.RemovePacketMirroringRule", "service": { - "fullName": "google.cloud.compute.v1.NetworkProfiles", - "shortName": "NetworkProfiles" + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" }, - "shortName": "List" + 
"shortName": "RemovePacketMirroringRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNetworkProfilesRequest" + "type": "google.cloud.compute_v1.types.RemovePacketMirroringRuleNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, + { + "name": "firewall_policy", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -33749,14 +34197,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.network_profiles.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_packet_mirroring_rule" }, - "description": "Sample for List", - "file": "compute_v1_generated_network_profiles_list_sync.py", + "description": "Sample for RemovePacketMirroringRule", + "file": "compute_v1_generated_network_firewall_policies_remove_packet_mirroring_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NetworkProfiles_List_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_RemovePacketMirroringRule_sync", "segments": [ { "end": 52, @@ -33774,56 +34222,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_network_profiles_list_sync.py" + "title": "compute_v1_generated_network_firewall_policies_remove_packet_mirroring_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworksClient", - "shortName": "NetworksClient" + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.add_peering", + 
"fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.remove_rule", "method": { - "fullName": "google.cloud.compute.v1.Networks.AddPeering", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.RemoveRule", "service": { - "fullName": "google.cloud.compute.v1.Networks", - "shortName": "Networks" + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" }, - "shortName": "AddPeering" + "shortName": "RemoveRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddPeeringNetworkRequest" + "type": "google.cloud.compute_v1.types.RemoveRuleNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "network", + "name": "firewall_policy", "type": "str" }, - { - "name": "networks_add_peering_request_resource", - "type": "google.cloud.compute_v1.types.NetworksAddPeeringRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -33838,13 +34282,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_peering" + "shortName": "remove_rule" }, - "description": "Sample for AddPeering", - "file": "compute_v1_generated_networks_add_peering_sync.py", + "description": "Sample for RemoveRule", + "file": "compute_v1_generated_network_firewall_policies_remove_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_AddPeering_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_RemoveRule_sync", "segments": [ { "end": 52, @@ -33877,37 +34321,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_add_peering_sync.py" + "title": "compute_v1_generated_network_firewall_policies_remove_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworksClient", - "shortName": "NetworksClient" + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + 
"shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.delete", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.Networks.Delete", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.Networks", - "shortName": "Networks" + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" }, - "shortName": "Delete" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteNetworkRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "network", + "name": "resource", "type": "str" }, + { + "name": "global_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.GlobalSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -33921,14 +34369,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_networks_delete_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_Delete_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_SetIamPolicy_sync", "segments": [ { "end": 52, @@ -33961,37 +34409,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_delete_sync.py" + "title": "compute_v1_generated_network_firewall_policies_set_iam_policy_sync.py" }, { "canonical": true, 
"clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworksClient", - "shortName": "NetworksClient" + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient", + "shortName": "NetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.get_effective_firewalls", + "fullName": "google.cloud.compute_v1.NetworkFirewallPoliciesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.Networks.GetEffectiveFirewalls", + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.Networks", - "shortName": "Networks" + "fullName": "google.cloud.compute.v1.NetworkFirewallPolicies", + "shortName": "NetworkFirewallPolicies" }, - "shortName": "GetEffectiveFirewalls" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetEffectiveFirewallsNetworkRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsNetworkFirewallPolicyRequest" }, { "name": "project", "type": "str" }, { - "name": "network", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -34005,14 +34457,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NetworksGetEffectiveFirewallsResponse", - "shortName": "get_effective_firewalls" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for GetEffectiveFirewalls", - "file": "compute_v1_generated_networks_get_effective_firewalls_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_Networks_GetEffectiveFirewalls_sync", + "regionTag": "compute_v1_generated_NetworkFirewallPolicies_TestIamPermissions_sync", "segments": [ { "end": 52, @@ -34045,35 +34497,35 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_get_effective_firewalls_sync.py" + "title": "compute_v1_generated_network_firewall_policies_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworksClient", - "shortName": "NetworksClient" + "fullName": "google.cloud.compute_v1.NetworkProfilesClient", + "shortName": "NetworkProfilesClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.get", + "fullName": "google.cloud.compute_v1.NetworkProfilesClient.get", "method": { - "fullName": "google.cloud.compute.v1.Networks.Get", + "fullName": "google.cloud.compute.v1.NetworkProfiles.Get", "service": { - "fullName": "google.cloud.compute.v1.Networks", - "shortName": "Networks" + "fullName": "google.cloud.compute.v1.NetworkProfiles", + "shortName": "NetworkProfiles" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNetworkRequest" + "type": "google.cloud.compute_v1.types.GetNetworkProfileRequest" }, { "name": "project", "type": "str" }, { - "name": "network", + "name": "network_profile", "type": "str" }, { @@ -34089,14 +34541,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Network", + "resultType": "google.cloud.compute_v1.types.NetworkProfile", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_networks_get_sync.py", + "file": "compute_v1_generated_network_profiles_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_Get_sync", + "regionTag": "compute_v1_generated_NetworkProfiles_Get_sync", "segments": [ { "end": 52, @@ -34129,37 +34581,33 @@ "type": 
"RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_get_sync.py" + "title": "compute_v1_generated_network_profiles_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NetworksClient", - "shortName": "NetworksClient" + "fullName": "google.cloud.compute_v1.NetworkProfilesClient", + "shortName": "NetworkProfilesClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.insert", + "fullName": "google.cloud.compute_v1.NetworkProfilesClient.list", "method": { - "fullName": "google.cloud.compute.v1.Networks.Insert", + "fullName": "google.cloud.compute.v1.NetworkProfiles.List", "service": { - "fullName": "google.cloud.compute.v1.Networks", - "shortName": "Networks" + "fullName": "google.cloud.compute.v1.NetworkProfiles", + "shortName": "NetworkProfiles" }, - "shortName": "Insert" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertNetworkRequest" + "type": "google.cloud.compute_v1.types.ListNetworkProfilesRequest" }, { "name": "project", "type": "str" }, - { - "name": "network_resource", - "type": "google.cloud.compute_v1.types.Network" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -34173,22 +34621,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.services.network_profiles.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_networks_insert_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_network_profiles_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_Insert_sync", + "regionTag": "compute_v1_generated_NetworkProfiles_List_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, 
"start": 27, "type": "SHORT" }, @@ -34208,12 +34656,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_insert_sync.py" + "title": "compute_v1_generated_network_profiles_list_sync.py" }, { "canonical": true, @@ -34222,19 +34670,19 @@ "fullName": "google.cloud.compute_v1.NetworksClient", "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.list_peering_routes", + "fullName": "google.cloud.compute_v1.NetworksClient.add_peering", "method": { - "fullName": "google.cloud.compute.v1.Networks.ListPeeringRoutes", + "fullName": "google.cloud.compute.v1.Networks.AddPeering", "service": { "fullName": "google.cloud.compute.v1.Networks", "shortName": "Networks" }, - "shortName": "ListPeeringRoutes" + "shortName": "AddPeering" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListPeeringRoutesNetworksRequest" + "type": "google.cloud.compute_v1.types.AddPeeringNetworkRequest" }, { "name": "project", @@ -34244,6 +34692,10 @@ "name": "network", "type": "str" }, + { + "name": "networks_add_peering_request_resource", + "type": "google.cloud.compute_v1.types.NetworksAddPeeringRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -34257,22 +34709,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.networks.pagers.ListPeeringRoutesPager", - "shortName": "list_peering_routes" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_peering" }, - "description": "Sample for ListPeeringRoutes", - "file": "compute_v1_generated_networks_list_peering_routes_sync.py", + "description": "Sample for AddPeering", + "file": "compute_v1_generated_networks_add_peering_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_ListPeeringRoutes_sync", + "regionTag": 
"compute_v1_generated_Networks_AddPeering_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -34292,12 +34744,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_list_peering_routes_sync.py" + "title": "compute_v1_generated_networks_add_peering_sync.py" }, { "canonical": true, @@ -34306,24 +34758,28 @@ "fullName": "google.cloud.compute_v1.NetworksClient", "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.list", + "fullName": "google.cloud.compute_v1.NetworksClient.delete", "method": { - "fullName": "google.cloud.compute.v1.Networks.List", + "fullName": "google.cloud.compute.v1.Networks.Delete", "service": { "fullName": "google.cloud.compute.v1.Networks", "shortName": "Networks" }, - "shortName": "List" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNetworksRequest" + "type": "google.cloud.compute_v1.types.DeleteNetworkRequest" }, { "name": "project", "type": "str" }, + { + "name": "network", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -34337,14 +34793,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.networks.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for List", - "file": "compute_v1_generated_networks_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_networks_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_List_sync", + "regionTag": "compute_v1_generated_Networks_Delete_sync", "segments": [ { "end": 52, @@ -34362,22 +34818,22 @@ "type": "CLIENT_INITIALIZATION" }, 
{ - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_list_sync.py" + "title": "compute_v1_generated_networks_delete_sync.py" }, { "canonical": true, @@ -34386,19 +34842,19 @@ "fullName": "google.cloud.compute_v1.NetworksClient", "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.patch", + "fullName": "google.cloud.compute_v1.NetworksClient.get_effective_firewalls", "method": { - "fullName": "google.cloud.compute.v1.Networks.Patch", + "fullName": "google.cloud.compute.v1.Networks.GetEffectiveFirewalls", "service": { "fullName": "google.cloud.compute.v1.Networks", "shortName": "Networks" }, - "shortName": "Patch" + "shortName": "GetEffectiveFirewalls" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchNetworkRequest" + "type": "google.cloud.compute_v1.types.GetEffectiveFirewallsNetworkRequest" }, { "name": "project", @@ -34408,10 +34864,6 @@ "name": "network", "type": "str" }, - { - "name": "network_resource", - "type": "google.cloud.compute_v1.types.Network" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -34425,14 +34877,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.NetworksGetEffectiveFirewallsResponse", + "shortName": "get_effective_firewalls" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_networks_patch_sync.py", + "description": "Sample for GetEffectiveFirewalls", + "file": "compute_v1_generated_networks_get_effective_firewalls_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_Patch_sync", + "regionTag": 
"compute_v1_generated_Networks_GetEffectiveFirewalls_sync", "segments": [ { "end": 52, @@ -34465,7 +34917,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_patch_sync.py" + "title": "compute_v1_generated_networks_get_effective_firewalls_sync.py" }, { "canonical": true, @@ -34474,19 +34926,19 @@ "fullName": "google.cloud.compute_v1.NetworksClient", "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.remove_peering", + "fullName": "google.cloud.compute_v1.NetworksClient.get", "method": { - "fullName": "google.cloud.compute.v1.Networks.RemovePeering", + "fullName": "google.cloud.compute.v1.Networks.Get", "service": { "fullName": "google.cloud.compute.v1.Networks", "shortName": "Networks" }, - "shortName": "RemovePeering" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemovePeeringNetworkRequest" + "type": "google.cloud.compute_v1.types.GetNetworkRequest" }, { "name": "project", @@ -34496,10 +34948,6 @@ "name": "network", "type": "str" }, - { - "name": "networks_remove_peering_request_resource", - "type": "google.cloud.compute_v1.types.NetworksRemovePeeringRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -34513,14 +34961,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_peering" + "resultType": "google.cloud.compute_v1.types.Network", + "shortName": "get" }, - "description": "Sample for RemovePeering", - "file": "compute_v1_generated_networks_remove_peering_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_networks_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_RemovePeering_sync", + "regionTag": "compute_v1_generated_Networks_Get_sync", "segments": [ { "end": 52, @@ -34553,7 +35001,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_networks_remove_peering_sync.py" + "title": "compute_v1_generated_networks_get_sync.py" }, { "canonical": true, @@ -34562,31 +35010,27 @@ "fullName": "google.cloud.compute_v1.NetworksClient", "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.request_remove_peering", + "fullName": "google.cloud.compute_v1.NetworksClient.insert", "method": { - "fullName": "google.cloud.compute.v1.Networks.RequestRemovePeering", + "fullName": "google.cloud.compute.v1.Networks.Insert", "service": { "fullName": "google.cloud.compute.v1.Networks", "shortName": "Networks" }, - "shortName": "RequestRemovePeering" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RequestRemovePeeringNetworkRequest" + "type": "google.cloud.compute_v1.types.InsertNetworkRequest" }, { "name": "project", "type": "str" }, { - "name": "network", - "type": "str" - }, - { - "name": "networks_request_remove_peering_request_resource", - "type": "google.cloud.compute_v1.types.NetworksRequestRemovePeeringRequest" + "name": "network_resource", + "type": "google.cloud.compute_v1.types.Network" }, { "name": "retry", @@ -34602,21 +35046,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "request_remove_peering" + "shortName": "insert" }, - "description": "Sample for RequestRemovePeering", - "file": "compute_v1_generated_networks_request_remove_peering_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_networks_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_RequestRemovePeering_sync", + "regionTag": "compute_v1_generated_Networks_Insert_sync", "segments": [ { - "end": 52, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 51, "start": 27, "type": "SHORT" }, @@ -34626,22 +35070,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, 
"type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_request_remove_peering_sync.py" + "title": "compute_v1_generated_networks_insert_sync.py" }, { "canonical": true, @@ -34650,19 +35094,19 @@ "fullName": "google.cloud.compute_v1.NetworksClient", "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.switch_to_custom_mode", + "fullName": "google.cloud.compute_v1.NetworksClient.list_peering_routes", "method": { - "fullName": "google.cloud.compute.v1.Networks.SwitchToCustomMode", + "fullName": "google.cloud.compute.v1.Networks.ListPeeringRoutes", "service": { "fullName": "google.cloud.compute.v1.Networks", "shortName": "Networks" }, - "shortName": "SwitchToCustomMode" + "shortName": "ListPeeringRoutes" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SwitchToCustomModeNetworkRequest" + "type": "google.cloud.compute_v1.types.ListPeeringRoutesNetworksRequest" }, { "name": "project", @@ -34685,22 +35129,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "switch_to_custom_mode" + "resultType": "google.cloud.compute_v1.services.networks.pagers.ListPeeringRoutesPager", + "shortName": "list_peering_routes" }, - "description": "Sample for SwitchToCustomMode", - "file": "compute_v1_generated_networks_switch_to_custom_mode_sync.py", + "description": "Sample for ListPeeringRoutes", + "file": "compute_v1_generated_networks_list_peering_routes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_SwitchToCustomMode_sync", + "regionTag": "compute_v1_generated_Networks_ListPeeringRoutes_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - 
"end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -34720,12 +35164,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_switch_to_custom_mode_sync.py" + "title": "compute_v1_generated_networks_list_peering_routes_sync.py" }, { "canonical": true, @@ -34734,32 +35178,24 @@ "fullName": "google.cloud.compute_v1.NetworksClient", "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NetworksClient.update_peering", + "fullName": "google.cloud.compute_v1.NetworksClient.list", "method": { - "fullName": "google.cloud.compute.v1.Networks.UpdatePeering", + "fullName": "google.cloud.compute.v1.Networks.List", "service": { "fullName": "google.cloud.compute.v1.Networks", "shortName": "Networks" }, - "shortName": "UpdatePeering" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdatePeeringNetworkRequest" + "type": "google.cloud.compute_v1.types.ListNetworksRequest" }, { "name": "project", "type": "str" }, - { - "name": "network", - "type": "str" - }, - { - "name": "networks_update_peering_request_resource", - "type": "google.cloud.compute_v1.types.NetworksUpdatePeeringRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -34773,14 +35209,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update_peering" + "resultType": "google.cloud.compute_v1.services.networks.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for UpdatePeering", - "file": "compute_v1_generated_networks_update_peering_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_networks_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_Networks_UpdatePeering_sync", + "regionTag": "compute_v1_generated_Networks_List_sync", "segments": 
[ { "end": 52, @@ -34798,59 +35234,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_networks_update_peering_sync.py" + "title": "compute_v1_generated_networks_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeGroupsClient", - "shortName": "NodeGroupsClient" + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.add_nodes", + "fullName": "google.cloud.compute_v1.NetworksClient.patch", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.AddNodes", + "fullName": "google.cloud.compute.v1.Networks.Patch", "service": { - "fullName": "google.cloud.compute.v1.NodeGroups", - "shortName": "NodeGroups" + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" }, - "shortName": "AddNodes" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddNodesNodeGroupRequest" + "type": "google.cloud.compute_v1.types.PatchNetworkRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", - "type": "str" - }, - { - "name": "node_group", + "name": "network", "type": "str" }, { - "name": "node_groups_add_nodes_request_resource", - "type": "google.cloud.compute_v1.types.NodeGroupsAddNodesRequest" + "name": "network_resource", + "type": "google.cloud.compute_v1.types.Network" }, { "name": "retry", @@ -34866,21 +35298,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_nodes" + "shortName": "patch" }, - "description": "Sample for AddNodes", - "file": "compute_v1_generated_node_groups_add_nodes_sync.py", + "description": "Sample for Patch", + 
"file": "compute_v1_generated_networks_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_AddNodes_sync", + "regionTag": "compute_v1_generated_Networks_Patch_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -34890,48 +35322,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_add_nodes_sync.py" + "title": "compute_v1_generated_networks_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeGroupsClient", - "shortName": "NodeGroupsClient" + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.aggregated_list", + "fullName": "google.cloud.compute_v1.NetworksClient.remove_peering", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.AggregatedList", + "fullName": "google.cloud.compute.v1.Networks.RemovePeering", "service": { - "fullName": "google.cloud.compute.v1.NodeGroups", - "shortName": "NodeGroups" + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" }, - "shortName": "AggregatedList" + "shortName": "RemovePeering" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListNodeGroupsRequest" + "type": "google.cloud.compute_v1.types.RemovePeeringNetworkRequest" }, { "name": "project", "type": "str" }, + { + "name": "network", + "type": "str" + }, + { + "name": "networks_remove_peering_request_resource", + "type": "google.cloud.compute_v1.types.NetworksRemovePeeringRequest" + }, { "name": "retry", "type": 
"google.api_core.retry.Retry" @@ -34945,14 +35385,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.node_groups.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_peering" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_node_groups_aggregated_list_sync.py", + "description": "Sample for RemovePeering", + "file": "compute_v1_generated_networks_remove_peering_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_AggregatedList_sync", + "regionTag": "compute_v1_generated_Networks_RemovePeering_sync", "segments": [ { "end": 52, @@ -34970,59 +35410,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_aggregated_list_sync.py" + "title": "compute_v1_generated_networks_remove_peering_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeGroupsClient", - "shortName": "NodeGroupsClient" + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.delete_nodes", + "fullName": "google.cloud.compute_v1.NetworksClient.request_remove_peering", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.DeleteNodes", + "fullName": "google.cloud.compute.v1.Networks.RequestRemovePeering", "service": { - "fullName": "google.cloud.compute.v1.NodeGroups", - "shortName": "NodeGroups" + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" }, - "shortName": "DeleteNodes" + "shortName": "RequestRemovePeering" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteNodesNodeGroupRequest" + "type": "google.cloud.compute_v1.types.RequestRemovePeeringNetworkRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", - "type": "str" - }, - { - "name": "node_group", + "name": "network", "type": "str" }, { - "name": "node_groups_delete_nodes_request_resource", - "type": "google.cloud.compute_v1.types.NodeGroupsDeleteNodesRequest" + "name": "networks_request_remove_peering_request_resource", + "type": "google.cloud.compute_v1.types.NetworksRequestRemovePeeringRequest" }, { "name": "retry", @@ -35038,21 +35474,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete_nodes" + "shortName": "request_remove_peering" }, - "description": "Sample for DeleteNodes", - "file": "compute_v1_generated_node_groups_delete_nodes_sync.py", + "description": "Sample for RequestRemovePeering", + "file": "compute_v1_generated_networks_request_remove_peering_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_DeleteNodes_sync", + "regionTag": "compute_v1_generated_Networks_RequestRemovePeering_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -35062,54 +35498,50 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_delete_nodes_sync.py" + "title": "compute_v1_generated_networks_request_remove_peering_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeGroupsClient", - "shortName": "NodeGroupsClient" + "fullName": 
"google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.delete", + "fullName": "google.cloud.compute_v1.NetworksClient.switch_to_custom_mode", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.Delete", + "fullName": "google.cloud.compute.v1.Networks.SwitchToCustomMode", "service": { - "fullName": "google.cloud.compute.v1.NodeGroups", - "shortName": "NodeGroups" + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" }, - "shortName": "Delete" + "shortName": "SwitchToCustomMode" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteNodeGroupRequest" + "type": "google.cloud.compute_v1.types.SwitchToCustomModeNetworkRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", - "type": "str" - }, - { - "name": "node_group", + "name": "network", "type": "str" }, { @@ -35126,21 +35558,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "switch_to_custom_mode" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_node_groups_delete_sync.py", + "description": "Sample for SwitchToCustomMode", + "file": "compute_v1_generated_networks_switch_to_custom_mode_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_Delete_sync", + "regionTag": "compute_v1_generated_Networks_SwitchToCustomMode_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -35150,55 +35582,55 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_node_groups_delete_sync.py" + "title": "compute_v1_generated_networks_switch_to_custom_mode_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeGroupsClient", - "shortName": "NodeGroupsClient" + "fullName": "google.cloud.compute_v1.NetworksClient", + "shortName": "NetworksClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.NetworksClient.update_peering", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.GetIamPolicy", + "fullName": "google.cloud.compute.v1.Networks.UpdatePeering", "service": { - "fullName": "google.cloud.compute.v1.NodeGroups", - "shortName": "NodeGroups" + "fullName": "google.cloud.compute.v1.Networks", + "shortName": "Networks" }, - "shortName": "GetIamPolicy" + "shortName": "UpdatePeering" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyNodeGroupRequest" + "type": "google.cloud.compute_v1.types.UpdatePeeringNetworkRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "network", "type": "str" }, { - "name": "resource", - "type": "str" + "name": "networks_update_peering_request_resource", + "type": "google.cloud.compute_v1.types.NetworksUpdatePeeringRequest" }, { "name": "retry", @@ -35213,22 +35645,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_peering" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_node_groups_get_iam_policy_sync.py", + "description": "Sample for UpdatePeering", + "file": "compute_v1_generated_networks_update_peering_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_GetIamPolicy_sync", + "regionTag": 
"compute_v1_generated_Networks_UpdatePeering_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -35238,22 +35670,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_get_iam_policy_sync.py" + "title": "compute_v1_generated_networks_update_peering_sync.py" }, { "canonical": true, @@ -35262,19 +35694,19 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.get", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.add_nodes", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.Get", + "fullName": "google.cloud.compute.v1.NodeGroups.AddNodes", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "Get" + "shortName": "AddNodes" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNodeGroupRequest" + "type": "google.cloud.compute_v1.types.AddNodesNodeGroupRequest" }, { "name": "project", @@ -35288,6 +35720,10 @@ "name": "node_group", "type": "str" }, + { + "name": "node_groups_add_nodes_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsAddNodesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -35301,14 +35737,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NodeGroup", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_nodes" }, - "description": "Sample for Get", - "file": "compute_v1_generated_node_groups_get_sync.py", + "description": 
"Sample for AddNodes", + "file": "compute_v1_generated_node_groups_add_nodes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_Get_sync", + "regionTag": "compute_v1_generated_NodeGroups_AddNodes_sync", "segments": [ { "end": 53, @@ -35341,7 +35777,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_get_sync.py" + "title": "compute_v1_generated_node_groups_add_nodes_sync.py" }, { "canonical": true, @@ -35350,36 +35786,24 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.insert", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.Insert", + "fullName": "google.cloud.compute.v1.NodeGroups.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "Insert" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertNodeGroupRequest" + "type": "google.cloud.compute_v1.types.AggregatedListNodeGroupsRequest" }, { "name": "project", "type": "str" }, - { - "name": "zone", - "type": "str" - }, - { - "name": "initial_node_count", - "type": "int" - }, - { - "name": "node_group_resource", - "type": "google.cloud.compute_v1.types.NodeGroup" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -35393,22 +35817,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.services.node_groups.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_node_groups_insert_sync.py", + "description": "Sample for AggregatedList", + "file": 
"compute_v1_generated_node_groups_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_Insert_sync", + "regionTag": "compute_v1_generated_NodeGroups_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -35418,22 +35842,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_insert_sync.py" + "title": "compute_v1_generated_node_groups_aggregated_list_sync.py" }, { "canonical": true, @@ -35442,19 +35866,19 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.list_nodes", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.delete_nodes", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.ListNodes", + "fullName": "google.cloud.compute.v1.NodeGroups.DeleteNodes", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "ListNodes" + "shortName": "DeleteNodes" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNodesNodeGroupsRequest" + "type": "google.cloud.compute_v1.types.DeleteNodesNodeGroupRequest" }, { "name": "project", @@ -35469,88 +35893,8 @@ "type": "str" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.compute_v1.services.node_groups.pagers.ListNodesPager", - "shortName": "list_nodes" - }, - "description": "Sample for ListNodes", - 
"file": "compute_v1_generated_node_groups_list_nodes_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_ListNodes_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 55, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_node_groups_list_nodes_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.NodeGroupsClient", - "shortName": "NodeGroupsClient" - }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.list", - "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.List", - "service": { - "fullName": "google.cloud.compute.v1.NodeGroups", - "shortName": "NodeGroups" - }, - "shortName": "List" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.ListNodeGroupsRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "zone", - "type": "str" + "name": "node_groups_delete_nodes_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsDeleteNodesRequest" }, { "name": "retry", @@ -35565,14 +35909,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.node_groups.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_nodes" }, - "description": "Sample for List", - "file": "compute_v1_generated_node_groups_list_sync.py", + "description": "Sample for DeleteNodes", + "file": "compute_v1_generated_node_groups_delete_nodes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_NodeGroups_List_sync", + "regionTag": "compute_v1_generated_NodeGroups_DeleteNodes_sync", "segments": [ { "end": 53, @@ -35590,22 +35934,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_list_sync.py" + "title": "compute_v1_generated_node_groups_delete_nodes_sync.py" }, { "canonical": true, @@ -35614,19 +35958,19 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.patch", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.Patch", + "fullName": "google.cloud.compute.v1.NodeGroups.Delete", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "Patch" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchNodeGroupRequest" + "type": "google.cloud.compute_v1.types.DeleteNodeGroupRequest" }, { "name": "project", @@ -35640,10 +35984,6 @@ "name": "node_group", "type": "str" }, - { - "name": "node_group_resource", - "type": "google.cloud.compute_v1.types.NodeGroup" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -35658,13 +35998,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "delete" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_node_groups_patch_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_node_groups_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_Patch_sync", + "regionTag": 
"compute_v1_generated_NodeGroups_Delete_sync", "segments": [ { "end": 53, @@ -35697,7 +36037,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_patch_sync.py" + "title": "compute_v1_generated_node_groups_delete_sync.py" }, { "canonical": true, @@ -35706,19 +36046,19 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.perform_maintenance", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.PerformMaintenance", + "fullName": "google.cloud.compute.v1.NodeGroups.GetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "PerformMaintenance" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PerformMaintenanceNodeGroupRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyNodeGroupRequest" }, { "name": "project", @@ -35729,13 +36069,9 @@ "type": "str" }, { - "name": "node_group", + "name": "resource", "type": "str" }, - { - "name": "node_groups_perform_maintenance_request_resource", - "type": "google.cloud.compute_v1.types.NodeGroupsPerformMaintenanceRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -35749,14 +36085,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "perform_maintenance" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for PerformMaintenance", - "file": "compute_v1_generated_node_groups_perform_maintenance_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_node_groups_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_NodeGroups_PerformMaintenance_sync", + "regionTag": "compute_v1_generated_NodeGroups_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -35789,7 +36125,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_perform_maintenance_sync.py" + "title": "compute_v1_generated_node_groups_get_iam_policy_sync.py" }, { "canonical": true, @@ -35798,19 +36134,19 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.SetIamPolicy", + "fullName": "google.cloud.compute.v1.NodeGroups.Get", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "SetIamPolicy" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyNodeGroupRequest" + "type": "google.cloud.compute_v1.types.GetNodeGroupRequest" }, { "name": "project", @@ -35821,13 +36157,9 @@ "type": "str" }, { - "name": "resource", + "name": "node_group", "type": "str" }, - { - "name": "zone_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -35841,14 +36173,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.types.NodeGroup", + "shortName": "get" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_node_groups_set_iam_policy_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_node_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_SetIamPolicy_sync", + "regionTag": 
"compute_v1_generated_NodeGroups_Get_sync", "segments": [ { "end": 53, @@ -35881,7 +36213,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_set_iam_policy_sync.py" + "title": "compute_v1_generated_node_groups_get_sync.py" }, { "canonical": true, @@ -35890,19 +36222,19 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.set_node_template", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.SetNodeTemplate", + "fullName": "google.cloud.compute.v1.NodeGroups.Insert", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "SetNodeTemplate" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetNodeTemplateNodeGroupRequest" + "type": "google.cloud.compute_v1.types.InsertNodeGroupRequest" }, { "name": "project", @@ -35913,12 +36245,12 @@ "type": "str" }, { - "name": "node_group", - "type": "str" + "name": "initial_node_count", + "type": "int" }, { - "name": "node_groups_set_node_template_request_resource", - "type": "google.cloud.compute_v1.types.NodeGroupsSetNodeTemplateRequest" + "name": "node_group_resource", + "type": "google.cloud.compute_v1.types.NodeGroup" }, { "name": "retry", @@ -35934,13 +36266,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_node_template" + "shortName": "insert" }, - "description": "Sample for SetNodeTemplate", - "file": "compute_v1_generated_node_groups_set_node_template_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_node_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_SetNodeTemplate_sync", + "regionTag": "compute_v1_generated_NodeGroups_Insert_sync", "segments": [ { "end": 53, 
@@ -35973,7 +36305,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_set_node_template_sync.py" + "title": "compute_v1_generated_node_groups_insert_sync.py" }, { "canonical": true, @@ -35982,19 +36314,19 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.simulate_maintenance_event", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.list_nodes", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.SimulateMaintenanceEvent", + "fullName": "google.cloud.compute.v1.NodeGroups.ListNodes", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "SimulateMaintenanceEvent" + "shortName": "ListNodes" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SimulateMaintenanceEventNodeGroupRequest" + "type": "google.cloud.compute_v1.types.ListNodesNodeGroupsRequest" }, { "name": "project", @@ -36008,10 +36340,6 @@ "name": "node_group", "type": "str" }, - { - "name": "node_groups_simulate_maintenance_event_request_resource", - "type": "google.cloud.compute_v1.types.NodeGroupsSimulateMaintenanceEventRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36025,22 +36353,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "simulate_maintenance_event" + "resultType": "google.cloud.compute_v1.services.node_groups.pagers.ListNodesPager", + "shortName": "list_nodes" }, - "description": "Sample for SimulateMaintenanceEvent", - "file": "compute_v1_generated_node_groups_simulate_maintenance_event_sync.py", + "description": "Sample for ListNodes", + "file": "compute_v1_generated_node_groups_list_nodes_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_SimulateMaintenanceEvent_sync", + "regionTag": 
"compute_v1_generated_NodeGroups_ListNodes_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -36060,12 +36388,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_simulate_maintenance_event_sync.py" + "title": "compute_v1_generated_node_groups_list_nodes_sync.py" }, { "canonical": true, @@ -36074,19 +36402,19 @@ "fullName": "google.cloud.compute_v1.NodeGroupsClient", "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeGroupsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.NodeGroups.TestIamPermissions", + "fullName": "google.cloud.compute.v1.NodeGroups.List", "service": { "fullName": "google.cloud.compute.v1.NodeGroups", "shortName": "NodeGroups" }, - "shortName": "TestIamPermissions" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsNodeGroupRequest" + "type": "google.cloud.compute_v1.types.ListNodeGroupsRequest" }, { "name": "project", @@ -36096,14 +36424,6 @@ "name": "zone", "type": "str" }, - { - "name": "resource", - "type": "str" - }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36117,14 +36437,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.services.node_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_node_groups_test_iam_permissions_sync.py", + "description": "Sample for List", + "file": 
"compute_v1_generated_node_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeGroups_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_NodeGroups_List_sync", "segments": [ { "end": 53, @@ -36142,48 +36462,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_groups_test_iam_permissions_sync.py" + "title": "compute_v1_generated_node_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTemplatesClient", - "shortName": "NodeTemplatesClient" + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeTemplatesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.patch", "method": { - "fullName": "google.cloud.compute.v1.NodeTemplates.AggregatedList", + "fullName": "google.cloud.compute.v1.NodeGroups.Patch", "service": { - "fullName": "google.cloud.compute.v1.NodeTemplates", - "shortName": "NodeTemplates" + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" }, - "shortName": "AggregatedList" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListNodeTemplatesRequest" + "type": "google.cloud.compute_v1.types.PatchNodeGroupRequest" }, { "name": "project", "type": "str" }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_group", + "type": "str" + }, + { + "name": "node_group_resource", + "type": "google.cloud.compute_v1.types.NodeGroup" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36197,22 +36529,22 @@ "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.node_templates.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_node_templates_aggregated_list_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_node_groups_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTemplates_AggregatedList_sync", + "regionTag": "compute_v1_generated_NodeGroups_Patch_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -36222,56 +36554,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_templates_aggregated_list_sync.py" + "title": "compute_v1_generated_node_groups_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTemplatesClient", - "shortName": "NodeTemplatesClient" + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeTemplatesClient.delete", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.perform_maintenance", "method": { - "fullName": "google.cloud.compute.v1.NodeTemplates.Delete", + "fullName": "google.cloud.compute.v1.NodeGroups.PerformMaintenance", "service": { - "fullName": "google.cloud.compute.v1.NodeTemplates", - "shortName": "NodeTemplates" + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" }, - "shortName": "Delete" + "shortName": 
"PerformMaintenance" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteNodeTemplateRequest" + "type": "google.cloud.compute_v1.types.PerformMaintenanceNodeGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "zone", "type": "str" }, { - "name": "node_template", + "name": "node_group", "type": "str" }, + { + "name": "node_groups_perform_maintenance_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsPerformMaintenanceRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36286,13 +36622,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "perform_maintenance" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_node_templates_delete_sync.py", + "description": "Sample for PerformMaintenance", + "file": "compute_v1_generated_node_groups_perform_maintenance_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTemplates_Delete_sync", + "regionTag": "compute_v1_generated_NodeGroups_PerformMaintenance_sync", "segments": [ { "end": 53, @@ -36325,41 +36661,45 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_templates_delete_sync.py" + "title": "compute_v1_generated_node_groups_perform_maintenance_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTemplatesClient", - "shortName": "NodeTemplatesClient" + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeTemplatesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.NodeTemplates.GetIamPolicy", + "fullName": "google.cloud.compute.v1.NodeGroups.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.NodeTemplates", - 
"shortName": "NodeTemplates" + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" }, - "shortName": "GetIamPolicy" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyNodeTemplateRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyNodeGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "zone", "type": "str" }, { "name": "resource", "type": "str" }, + { + "name": "zone_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.ZoneSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36374,13 +36714,13 @@ } ], "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "shortName": "set_iam_policy" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_node_templates_get_iam_policy_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_node_groups_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTemplates_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_NodeGroups_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -36413,41 +36753,45 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_templates_get_iam_policy_sync.py" + "title": "compute_v1_generated_node_groups_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTemplatesClient", - "shortName": "NodeTemplatesClient" + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeTemplatesClient.get", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.set_node_template", "method": { - "fullName": "google.cloud.compute.v1.NodeTemplates.Get", + "fullName": "google.cloud.compute.v1.NodeGroups.SetNodeTemplate", 
"service": { - "fullName": "google.cloud.compute.v1.NodeTemplates", - "shortName": "NodeTemplates" + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" }, - "shortName": "Get" + "shortName": "SetNodeTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNodeTemplateRequest" + "type": "google.cloud.compute_v1.types.SetNodeTemplateNodeGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "zone", "type": "str" }, { - "name": "node_template", + "name": "node_group", "type": "str" }, + { + "name": "node_groups_set_node_template_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsSetNodeTemplateRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36461,14 +36805,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NodeTemplate", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_node_template" }, - "description": "Sample for Get", - "file": "compute_v1_generated_node_templates_get_sync.py", + "description": "Sample for SetNodeTemplate", + "file": "compute_v1_generated_node_groups_set_node_template_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTemplates_Get_sync", + "regionTag": "compute_v1_generated_NodeGroups_SetNodeTemplate_sync", "segments": [ { "end": 53, @@ -36501,40 +36845,44 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_templates_get_sync.py" + "title": "compute_v1_generated_node_groups_set_node_template_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTemplatesClient", - "shortName": "NodeTemplatesClient" + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeTemplatesClient.insert", + 
"fullName": "google.cloud.compute_v1.NodeGroupsClient.simulate_maintenance_event", "method": { - "fullName": "google.cloud.compute.v1.NodeTemplates.Insert", + "fullName": "google.cloud.compute.v1.NodeGroups.SimulateMaintenanceEvent", "service": { - "fullName": "google.cloud.compute.v1.NodeTemplates", - "shortName": "NodeTemplates" + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" }, - "shortName": "Insert" + "shortName": "SimulateMaintenanceEvent" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertNodeTemplateRequest" + "type": "google.cloud.compute_v1.types.SimulateMaintenanceEventNodeGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "zone", "type": "str" }, { - "name": "node_template_resource", - "type": "google.cloud.compute_v1.types.NodeTemplate" + "name": "node_group", + "type": "str" + }, + { + "name": "node_groups_simulate_maintenance_event_request_resource", + "type": "google.cloud.compute_v1.types.NodeGroupsSimulateMaintenanceEventRequest" }, { "name": "retry", @@ -36550,21 +36898,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "simulate_maintenance_event" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_node_templates_insert_sync.py", + "description": "Sample for SimulateMaintenanceEvent", + "file": "compute_v1_generated_node_groups_simulate_maintenance_event_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTemplates_Insert_sync", + "regionTag": "compute_v1_generated_NodeGroups_SimulateMaintenanceEvent_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -36574,52 +36922,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + 
"end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_templates_insert_sync.py" + "title": "compute_v1_generated_node_groups_simulate_maintenance_event_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTemplatesClient", - "shortName": "NodeTemplatesClient" + "fullName": "google.cloud.compute_v1.NodeGroupsClient", + "shortName": "NodeGroupsClient" }, - "fullName": "google.cloud.compute_v1.NodeTemplatesClient.list", + "fullName": "google.cloud.compute_v1.NodeGroupsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.NodeTemplates.List", + "fullName": "google.cloud.compute.v1.NodeGroups.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.NodeTemplates", - "shortName": "NodeTemplates" + "fullName": "google.cloud.compute.v1.NodeGroups", + "shortName": "NodeGroups" }, - "shortName": "List" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNodeTemplatesRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsNodeGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "zone", "type": "str" }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36633,14 +36989,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.node_templates.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for List", - "file": "compute_v1_generated_node_templates_list_sync.py", + "description": "Sample 
for TestIamPermissions", + "file": "compute_v1_generated_node_groups_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTemplates_List_sync", + "regionTag": "compute_v1_generated_NodeGroups_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -36658,22 +37014,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_templates_list_sync.py" + "title": "compute_v1_generated_node_groups_test_iam_permissions_sync.py" }, { "canonical": true, @@ -36682,36 +37038,24 @@ "fullName": "google.cloud.compute_v1.NodeTemplatesClient", "shortName": "NodeTemplatesClient" }, - "fullName": "google.cloud.compute_v1.NodeTemplatesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.NodeTemplates.SetIamPolicy", + "fullName": "google.cloud.compute.v1.NodeTemplates.AggregatedList", "service": { "fullName": "google.cloud.compute.v1.NodeTemplates", "shortName": "NodeTemplates" }, - "shortName": "SetIamPolicy" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyNodeTemplateRequest" + "type": "google.cloud.compute_v1.types.AggregatedListNodeTemplatesRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "region_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36725,22 +37069,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.services.node_templates.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_node_templates_set_iam_policy_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_node_templates_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTemplates_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_NodeTemplates_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -36750,22 +37094,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_templates_set_iam_policy_sync.py" + "title": "compute_v1_generated_node_templates_aggregated_list_sync.py" }, { "canonical": true, @@ -36774,19 +37118,19 @@ "fullName": "google.cloud.compute_v1.NodeTemplatesClient", "shortName": "NodeTemplatesClient" }, - "fullName": "google.cloud.compute_v1.NodeTemplatesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.NodeTemplates.TestIamPermissions", + "fullName": "google.cloud.compute.v1.NodeTemplates.Delete", "service": { "fullName": "google.cloud.compute.v1.NodeTemplates", "shortName": "NodeTemplates" }, - "shortName": "TestIamPermissions" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsNodeTemplateRequest" + "type": 
"google.cloud.compute_v1.types.DeleteNodeTemplateRequest" }, { "name": "project", @@ -36797,13 +37141,9 @@ "type": "str" }, { - "name": "resource", + "name": "node_template", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36817,14 +37157,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_node_templates_test_iam_permissions_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_node_templates_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTemplates_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_NodeTemplates_Delete_sync", "segments": [ { "end": 53, @@ -36857,33 +37197,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_templates_test_iam_permissions_sync.py" + "title": "compute_v1_generated_node_templates_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTypesClient", - "shortName": "NodeTypesClient" + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" }, - "fullName": "google.cloud.compute_v1.NodeTypesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.NodeTypes.AggregatedList", + "fullName": "google.cloud.compute.v1.NodeTemplates.GetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.NodeTypes", - "shortName": "NodeTypes" + "fullName": "google.cloud.compute.v1.NodeTemplates", + 
"shortName": "NodeTemplates" }, - "shortName": "AggregatedList" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListNodeTypesRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyNodeTemplateRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -36897,22 +37245,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.node_types.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_node_types_aggregated_list_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_node_templates_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTypes_AggregatedList_sync", + "regionTag": "compute_v1_generated_NodeTemplates_GetIamPolicy_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -36922,54 +37270,54 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_types_aggregated_list_sync.py" + "title": "compute_v1_generated_node_templates_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTypesClient", - "shortName": "NodeTypesClient" + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": 
"NodeTemplatesClient" }, - "fullName": "google.cloud.compute_v1.NodeTypesClient.get", + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.get", "method": { - "fullName": "google.cloud.compute.v1.NodeTypes.Get", + "fullName": "google.cloud.compute.v1.NodeTemplates.Get", "service": { - "fullName": "google.cloud.compute.v1.NodeTypes", - "shortName": "NodeTypes" + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetNodeTypeRequest" + "type": "google.cloud.compute_v1.types.GetNodeTemplateRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "region", "type": "str" }, { - "name": "node_type", + "name": "node_template", "type": "str" }, { @@ -36985,14 +37333,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NodeType", + "resultType": "google.cloud.compute_v1.types.NodeTemplate", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_node_types_get_sync.py", + "file": "compute_v1_generated_node_templates_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTypes_Get_sync", + "regionTag": "compute_v1_generated_NodeTemplates_Get_sync", "segments": [ { "end": 53, @@ -37025,37 +37373,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_types_get_sync.py" + "title": "compute_v1_generated_node_templates_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.NodeTypesClient", - "shortName": "NodeTypesClient" + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" }, - "fullName": "google.cloud.compute_v1.NodeTypesClient.list", + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.insert", "method": { - "fullName": 
"google.cloud.compute.v1.NodeTypes.List", + "fullName": "google.cloud.compute.v1.NodeTemplates.Insert", "service": { - "fullName": "google.cloud.compute.v1.NodeTypes", - "shortName": "NodeTypes" + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" }, - "shortName": "List" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNodeTypesRequest" + "type": "google.cloud.compute_v1.types.InsertNodeTemplateRequest" }, { "name": "project", "type": "str" }, { - "name": "zone", + "name": "region", "type": "str" }, + { + "name": "node_template_resource", + "type": "google.cloud.compute_v1.types.NodeTemplate" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -37069,22 +37421,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.node_types.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for List", - "file": "compute_v1_generated_node_types_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_node_templates_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_NodeTypes_List_sync", + "regionTag": "compute_v1_generated_NodeTemplates_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -37104,41 +37456,41 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_node_types_list_sync.py" + "title": "compute_v1_generated_node_templates_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", - "shortName": "OrganizationSecurityPoliciesClient" + "fullName": 
"google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" }, - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.add_association", + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.list", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.AddAssociation", + "fullName": "google.cloud.compute.v1.NodeTemplates.List", "service": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", - "shortName": "OrganizationSecurityPolicies" + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" }, - "shortName": "AddAssociation" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddAssociationOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.ListNodeTemplatesRequest" }, { - "name": "security_policy", + "name": "project", "type": "str" }, { - "name": "security_policy_association_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicyAssociation" + "name": "region", + "type": "str" }, { "name": "retry", @@ -37153,22 +37505,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_association" + "resultType": "google.cloud.compute_v1.services.node_templates.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for AddAssociation", - "file": "compute_v1_generated_organization_security_policies_add_association_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_node_templates_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_AddAssociation_sync", + "regionTag": "compute_v1_generated_NodeTemplates_List_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -37178,51 
+37530,59 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_organization_security_policies_add_association_sync.py" + "title": "compute_v1_generated_node_templates_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", - "shortName": "OrganizationSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" }, - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.add_rule", + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.AddRule", + "fullName": "google.cloud.compute.v1.NodeTemplates.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", - "shortName": "OrganizationSecurityPolicies" + "fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" }, - "shortName": "AddRule" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddRuleOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyNodeTemplateRequest" }, { - "name": "security_policy", + "name": "project", "type": "str" }, { - "name": "security_policy_rule_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicyRule" + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" }, { "name": "retry", @@ -37237,22 +37597,22 @@ "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_rule" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for AddRule", - "file": "compute_v1_generated_organization_security_policies_add_rule_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_node_templates_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_AddRule_sync", + "regionTag": "compute_v1_generated_NodeTemplates_SetIamPolicy_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -37262,48 +37622,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_organization_security_policies_add_rule_sync.py" + "title": "compute_v1_generated_node_templates_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", - "shortName": "OrganizationSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.NodeTemplatesClient", + "shortName": "NodeTemplatesClient" }, - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.copy_rules", + "fullName": "google.cloud.compute_v1.NodeTemplatesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.CopyRules", + "fullName": "google.cloud.compute.v1.NodeTemplates.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", - "shortName": "OrganizationSecurityPolicies" + 
"fullName": "google.cloud.compute.v1.NodeTemplates", + "shortName": "NodeTemplates" }, - "shortName": "CopyRules" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.CopyRulesOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsNodeTemplateRequest" }, { - "name": "security_policy", + "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -37317,22 +37689,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "copy_rules" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for CopyRules", - "file": "compute_v1_generated_organization_security_policies_copy_rules_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_node_templates_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_CopyRules_sync", + "regionTag": "compute_v1_generated_NodeTemplates_TestIamPermissions_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -37342,46 +37714,46 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_organization_security_policies_copy_rules_sync.py" + 
"title": "compute_v1_generated_node_templates_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", - "shortName": "OrganizationSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.NodeTypesClient", + "shortName": "NodeTypesClient" }, - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.delete", + "fullName": "google.cloud.compute_v1.NodeTypesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.Delete", + "fullName": "google.cloud.compute.v1.NodeTypes.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", - "shortName": "OrganizationSecurityPolicies" + "fullName": "google.cloud.compute.v1.NodeTypes", + "shortName": "NodeTypes" }, - "shortName": "Delete" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.AggregatedListNodeTypesRequest" }, { - "name": "security_policy", + "name": "project", "type": "str" }, { @@ -37397,22 +37769,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.node_types.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_organization_security_policies_delete_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_node_types_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_Delete_sync", + "regionTag": "compute_v1_generated_NodeTypes_AggregatedList_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": 
"FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -37432,36 +37804,44 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_organization_security_policies_delete_sync.py" + "title": "compute_v1_generated_node_types_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", - "shortName": "OrganizationSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.NodeTypesClient", + "shortName": "NodeTypesClient" }, - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.get_association", + "fullName": "google.cloud.compute_v1.NodeTypesClient.get", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.GetAssociation", + "fullName": "google.cloud.compute.v1.NodeTypes.Get", "service": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", - "shortName": "OrganizationSecurityPolicies" + "fullName": "google.cloud.compute.v1.NodeTypes", + "shortName": "NodeTypes" }, - "shortName": "GetAssociation" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetAssociationOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.GetNodeTypeRequest" }, { - "name": "security_policy", + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "node_type", "type": "str" }, { @@ -37477,22 +37857,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.SecurityPolicyAssociation", - "shortName": "get_association" + "resultType": "google.cloud.compute_v1.types.NodeType", + "shortName": "get" }, - "description": "Sample for GetAssociation", - "file": "compute_v1_generated_organization_security_policies_get_association_sync.py", + "description": "Sample for Get", + "file": 
"compute_v1_generated_node_types_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_GetAssociation_sync", + "regionTag": "compute_v1_generated_NodeTypes_Get_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -37502,46 +37882,50 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_organization_security_policies_get_association_sync.py" + "title": "compute_v1_generated_node_types_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", - "shortName": "OrganizationSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.NodeTypesClient", + "shortName": "NodeTypesClient" }, - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.get_rule", + "fullName": "google.cloud.compute_v1.NodeTypesClient.list", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.GetRule", + "fullName": "google.cloud.compute.v1.NodeTypes.List", "service": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", - "shortName": "OrganizationSecurityPolicies" + "fullName": "google.cloud.compute.v1.NodeTypes", + "shortName": "NodeTypes" }, - "shortName": "GetRule" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRuleOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.ListNodeTypesRequest" }, { - "name": "security_policy", + "name": "project", + "type": "str" + }, + { + "name": "zone", "type": "str" }, { @@ -37557,22 +37941,22 @@ 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.SecurityPolicyRule", - "shortName": "get_rule" + "resultType": "google.cloud.compute_v1.services.node_types.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for GetRule", - "file": "compute_v1_generated_organization_security_policies_get_rule_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_node_types_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_GetRule_sync", + "regionTag": "compute_v1_generated_NodeTypes_List_sync", "segments": [ { - "end": 51, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 53, "start": 27, "type": "SHORT" }, @@ -37582,22 +37966,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_organization_security_policies_get_rule_sync.py" + "title": "compute_v1_generated_node_types_list_sync.py" }, { "canonical": true, @@ -37606,24 +37990,28 @@ "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", "shortName": "OrganizationSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.get", + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.add_association", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.Get", + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.AddAssociation", "service": { "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", "shortName": "OrganizationSecurityPolicies" }, - "shortName": "Get" + "shortName": "AddAssociation" }, "parameters": [ { "name": "request", - 
"type": "google.cloud.compute_v1.types.GetOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.AddAssociationOrganizationSecurityPolicyRequest" }, { "name": "security_policy", "type": "str" }, + { + "name": "security_policy_association_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyAssociation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -37637,14 +38025,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.SecurityPolicy", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_association" }, - "description": "Sample for Get", - "file": "compute_v1_generated_organization_security_policies_get_sync.py", + "description": "Sample for AddAssociation", + "file": "compute_v1_generated_organization_security_policies_add_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_Get_sync", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_AddAssociation_sync", "segments": [ { "end": 51, @@ -37677,7 +38065,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_organization_security_policies_get_sync.py" + "title": "compute_v1_generated_organization_security_policies_add_association_sync.py" }, { "canonical": true, @@ -37686,23 +38074,27 @@ "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", "shortName": "OrganizationSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.insert", + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.add_rule", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.Insert", + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.AddRule", "service": { "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", "shortName": 
"OrganizationSecurityPolicies" }, - "shortName": "Insert" + "shortName": "AddRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.AddRuleOrganizationSecurityPolicyRequest" }, { - "name": "security_policy_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicy" + "name": "security_policy", + "type": "str" + }, + { + "name": "security_policy_rule_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyRule" }, { "name": "retry", @@ -37718,21 +38110,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "add_rule" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_organization_security_policies_insert_sync.py", + "description": "Sample for AddRule", + "file": "compute_v1_generated_organization_security_policies_add_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_Insert_sync", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_AddRule_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -37742,22 +38134,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, - "start": 48, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_organization_security_policies_insert_sync.py" + "title": "compute_v1_generated_organization_security_policies_add_rule_sync.py" }, { "canonical": true, @@ -37766,19 +38158,23 @@ "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", "shortName": "OrganizationSecurityPoliciesClient" }, - "fullName": 
"google.cloud.compute_v1.OrganizationSecurityPoliciesClient.list_associations", + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.copy_rules", "method": { - "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.ListAssociations", + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.CopyRules", "service": { "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", "shortName": "OrganizationSecurityPolicies" }, - "shortName": "ListAssociations" + "shortName": "CopyRules" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListAssociationsOrganizationSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.CopyRulesOrganizationSecurityPolicyRequest" + }, + { + "name": "security_policy", + "type": "str" }, { "name": "retry", @@ -37793,22 +38189,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.OrganizationSecurityPoliciesListAssociationsResponse", - "shortName": "list_associations" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "copy_rules" }, - "description": "Sample for ListAssociations", - "file": "compute_v1_generated_organization_security_policies_list_associations_sync.py", + "description": "Sample for CopyRules", + "file": "compute_v1_generated_organization_security_policies_copy_rules_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_ListAssociations_sync", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_CopyRules_sync", "segments": [ { - "end": 50, + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -37818,18 +38214,494 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 44, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 47, - "start": 45, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { 
- "end": 51, - "start": 48, + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_organization_security_policies_copy_rules_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", + "shortName": "OrganizationSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.Delete", + "service": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", + "shortName": "OrganizationSecurityPolicies" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteOrganizationSecurityPolicyRequest" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_organization_security_policies_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_Delete_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_organization_security_policies_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", + "shortName": "OrganizationSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.get_association", + "method": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.GetAssociation", + "service": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", + "shortName": "OrganizationSecurityPolicies" + }, + "shortName": "GetAssociation" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetAssociationOrganizationSecurityPolicyRequest" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.SecurityPolicyAssociation", + "shortName": "get_association" + }, + "description": "Sample for GetAssociation", + "file": "compute_v1_generated_organization_security_policies_get_association_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_GetAssociation_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_organization_security_policies_get_association_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", + "shortName": "OrganizationSecurityPoliciesClient" + }, + "fullName": 
"google.cloud.compute_v1.OrganizationSecurityPoliciesClient.get_rule", + "method": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.GetRule", + "service": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", + "shortName": "OrganizationSecurityPolicies" + }, + "shortName": "GetRule" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRuleOrganizationSecurityPolicyRequest" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.SecurityPolicyRule", + "shortName": "get_rule" + }, + "description": "Sample for GetRule", + "file": "compute_v1_generated_organization_security_policies_get_rule_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_GetRule_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_organization_security_policies_get_rule_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", + "shortName": "OrganizationSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.Get", + "service": { + "fullName": 
"google.cloud.compute.v1.OrganizationSecurityPolicies", + "shortName": "OrganizationSecurityPolicies" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetOrganizationSecurityPolicyRequest" + }, + { + "name": "security_policy", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.SecurityPolicy", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_organization_security_policies_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_Get_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_organization_security_policies_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", + "shortName": "OrganizationSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.Insert", + "service": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", + "shortName": "OrganizationSecurityPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertOrganizationSecurityPolicyRequest" + }, 
+ { + "name": "security_policy_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_organization_security_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_Insert_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_organization_security_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient", + "shortName": "OrganizationSecurityPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.OrganizationSecurityPoliciesClient.list_associations", + "method": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies.ListAssociations", + "service": { + "fullName": "google.cloud.compute.v1.OrganizationSecurityPolicies", + "shortName": "OrganizationSecurityPolicies" + }, + "shortName": "ListAssociations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListAssociationsOrganizationSecurityPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + 
"type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.OrganizationSecurityPoliciesListAssociationsResponse", + "shortName": "list_associations" + }, + "description": "Sample for ListAssociations", + "file": "compute_v1_generated_organization_security_policies_list_associations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_OrganizationSecurityPolicies_ListAssociations_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "start": 48, "type": "RESPONSE_HANDLING" } ], @@ -41345,28 +42217,3464 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_get_sync.py" + "title": "compute_v1_generated_public_delegated_prefixes_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Insert", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, 
+ { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_public_delegated_prefixes_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.List", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.public_delegated_prefixes.pagers.ListPager", + "shortName": "list" + }, 
+ "description": "Sample for List", + "file": "compute_v1_generated_public_delegated_prefixes_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Patch", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, + { + "name": "public_delegated_prefix_resource", + "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": 
"compute_v1_generated_public_delegated_prefixes_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", + "shortName": "PublicDelegatedPrefixesClient" + }, + "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.withdraw", + "method": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Withdraw", + "service": { + "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", + "shortName": "PublicDelegatedPrefixes" + }, + "shortName": "Withdraw" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.WithdrawPublicDelegatedPrefixeRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "public_delegated_prefix", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "withdraw" + }, + "description": "Sample for Withdraw", + "file": "compute_v1_generated_public_delegated_prefixes_withdraw_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_PublicDelegatedPrefixes_Withdraw_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_public_delegated_prefixes_withdraw_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_autoscalers_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" 
+ }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.Autoscaler", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_autoscalers_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_get_sync.py" + }, + { + 
"canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": "google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_autoscalers_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": 
"google.cloud.compute_v1.RegionAutoscalersClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionAutoscalersRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_autoscalers.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_autoscalers_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": 
"request", + "type": "google.cloud.compute_v1.types.PatchRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": "google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_autoscalers_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Patch_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": 
"str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_autoscalers_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", + "shortName": "RegionAutoscalersClient" + }, + "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionAutoscalers", + "shortName": "RegionAutoscalers" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRegionAutoscalerRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "autoscaler_resource", + "type": 
"google.cloud.compute_v1.types.Autoscaler" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_region_autoscalers_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionAutoscalers_Update_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_autoscalers_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_backend_buckets_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_buckets_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": 
"compute_v1_generated_region_backend_buckets_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_buckets_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.BackendBucket", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_backend_buckets_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + 
"type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_buckets_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_bucket_resource", + "type": "google.cloud.compute_v1.types.BackendBucket" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_backend_buckets_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_buckets_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient.list_usable", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets.ListUsable", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "ListUsable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListUsableRegionBackendBucketsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_backend_buckets.pagers.ListUsablePager", + "shortName": "list_usable" + }, + "description": "Sample for ListUsable", + "file": "compute_v1_generated_region_backend_buckets_list_usable_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_ListUsable_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_region_backend_buckets_list_usable_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionBackendBucketsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_backend_buckets.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_backend_buckets_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_buckets_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": 
"google.cloud.compute_v1.RegionBackendBucketsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_bucket", + "type": "str" + }, + { + "name": "backend_bucket_resource", + "type": "google.cloud.compute_v1.types.BackendBucket" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_backend_buckets_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_buckets_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient.set_iam_policy", + "method": { + "fullName": 
"google.cloud.compute.v1.RegionBackendBuckets.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_backend_buckets_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_buckets_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient", + "shortName": "RegionBackendBucketsClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendBucketsClient.test_iam_permissions", + "method": { + "fullName": 
"google.cloud.compute.v1.RegionBackendBuckets.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendBuckets", + "shortName": "RegionBackendBuckets" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionBackendBucketRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_backend_buckets_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendBuckets_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_buckets_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.delete", + "method": { + "fullName": 
"google.cloud.compute.v1.RegionBackendServices.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_backend_services_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get_health", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.GetHealth", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "GetHealth" + 
}, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetHealthRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "resource_group_reference_resource", + "type": "google.cloud.compute_v1.types.ResourceGroupReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.BackendServiceGroupHealth", + "shortName": "get_health" + }, + "description": "Sample for GetHealth", + "file": "compute_v1_generated_region_backend_services_get_health_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_GetHealth_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_get_health_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.GetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "GetIamPolicy" + }, + "parameters": [ + { + "name": "request", + 
"type": "google.cloud.compute_v1.types.GetIamPolicyRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" + }, + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_GetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + 
"type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.BackendService", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_backend_services_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Insert", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_backend_services_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.list_usable", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.ListUsable", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "ListUsable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListUsableRegionBackendServicesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_backend_services.pagers.ListUsablePager", + "shortName": "list_usable" + }, + "description": "Sample for ListUsable", + "file": 
"compute_v1_generated_region_backend_services_list_usable_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_ListUsable_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_list_usable_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.ListRegionBackendServicesRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_backend_services.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_backend_services_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": 
"FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" + }, + "description": "Sample for Patch", + "file": "compute_v1_generated_region_backend_services_patch_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": 
"CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.set_iam_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.SetIamPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "SetIamPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" + }, + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_backend_services_set_iam_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_SetIamPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + 
"type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_set_iam_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.set_security_policy", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.SetSecurityPolicy", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "SetSecurityPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.SetSecurityPolicyRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "security_policy_reference_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_security_policy" + }, + "description": "Sample for SetSecurityPolicy", + "file": "compute_v1_generated_region_backend_services_set_security_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_SetSecurityPolicy_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + 
"start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_set_security_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_backend_services_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { 
+ "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", + "shortName": "RegionBackendServicesClient" + }, + "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionBackendServices.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionBackendServices", + "shortName": "RegionBackendServices" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRegionBackendServiceRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "backend_service", + "type": "str" + }, + { + "name": "backend_service_resource", + "type": "google.cloud.compute_v1.types.BackendService" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_region_backend_services_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionBackendServices_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 
48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_backend_services_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListRegionCommitmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_commitments.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_region_commitments_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + 
"fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.Get", + "service": { + "fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetRegionCommitmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "commitment", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.Commitment", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_commitments_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.insert", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.Insert", + "service": { + "fullName": 
"google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.InsertRegionCommitmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "commitment_resource", + "type": "google.cloud.compute_v1.types.Commitment" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_region_commitments_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.list", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.List", + "service": { + "fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "List" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.ListRegionCommitmentsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.region_commitments.pagers.ListPager", + "shortName": "list" + }, + "description": "Sample for List", + "file": "compute_v1_generated_region_commitments_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_List_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", + "shortName": "RegionCommitmentsClient" + }, + "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.update", + "method": { + "fullName": "google.cloud.compute.v1.RegionCommitments.Update", + "service": { + "fullName": "google.cloud.compute.v1.RegionCommitments", + "shortName": "RegionCommitments" + }, + "shortName": "Update" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateRegionCommitmentRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "commitment", + "type": "str" + }, + { + "name": "commitment_resource", + "type": "google.cloud.compute_v1.types.Commitment" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" + }, + "description": "Sample for Update", + "file": "compute_v1_generated_region_commitments_update_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCommitments_Update_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_commitments_update_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "shortName": "RegionCompositeHealthChecks" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListRegionCompositeHealthChecksRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.compute_v1.services.region_composite_health_checks.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_region_composite_health_checks_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_composite_health_checks_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" + }, + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.Delete", + "service": { + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "shortName": "RegionCompositeHealthChecks" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteRegionCompositeHealthCheckRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "composite_health_check", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + 
"shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_region_composite_health_checks_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_composite_health_checks_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", - "shortName": "PublicDelegatedPrefixesClient" + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.insert", + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.get_health", "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Insert", + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.GetHealth", "service": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", - "shortName": "PublicDelegatedPrefixes" + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "shortName": "RegionCompositeHealthChecks" }, - "shortName": "Insert" + "shortName": "GetHealth" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertPublicDelegatedPrefixeRequest" + "type": "google.cloud.compute_v1.types.GetHealthRegionCompositeHealthCheckRequest" }, { "name": "project", @@ -41377,8 +45685,8 @@ "type": "str" }, { - "name": "public_delegated_prefix_resource", - 
"type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + "name": "composite_health_check", + "type": "str" }, { "name": "retry", @@ -41393,22 +45701,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.CompositeHealthCheckHealth", + "shortName": "get_health" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_public_delegated_prefixes_insert_sync.py", + "description": "Sample for GetHealth", + "file": "compute_v1_generated_region_composite_health_checks_get_health_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Insert_sync", + "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_GetHealth_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -41418,43 +45726,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_insert_sync.py" + "title": "compute_v1_generated_region_composite_health_checks_get_health_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", - "shortName": "PublicDelegatedPrefixesClient" + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.list", + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.get", "method": { - "fullName": 
"google.cloud.compute.v1.PublicDelegatedPrefixes.List", + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.Get", "service": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", - "shortName": "PublicDelegatedPrefixes" + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "shortName": "RegionCompositeHealthChecks" }, - "shortName": "List" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListPublicDelegatedPrefixesRequest" + "type": "google.cloud.compute_v1.types.GetRegionCompositeHealthCheckRequest" }, { "name": "project", @@ -41464,6 +45772,10 @@ "name": "region", "type": "str" }, + { + "name": "composite_health_check", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41477,14 +45789,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.public_delegated_prefixes.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.CompositeHealthCheck", + "shortName": "get" }, - "description": "Sample for List", - "file": "compute_v1_generated_public_delegated_prefixes_list_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_composite_health_checks_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_List_sync", + "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_Get_sync", "segments": [ { "end": 53, @@ -41502,43 +45814,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_list_sync.py" + "title": "compute_v1_generated_region_composite_health_checks_get_sync.py" 
}, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", - "shortName": "PublicDelegatedPrefixesClient" + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.patch", + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.insert", "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Patch", + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.Insert", "service": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", - "shortName": "PublicDelegatedPrefixes" + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "shortName": "RegionCompositeHealthChecks" }, - "shortName": "Patch" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchPublicDelegatedPrefixeRequest" + "type": "google.cloud.compute_v1.types.InsertRegionCompositeHealthCheckRequest" }, { "name": "project", @@ -41549,12 +45861,8 @@ "type": "str" }, { - "name": "public_delegated_prefix", - "type": "str" - }, - { - "name": "public_delegated_prefix_resource", - "type": "google.cloud.compute_v1.types.PublicDelegatedPrefix" + "name": "composite_health_check_resource", + "type": "google.cloud.compute_v1.types.CompositeHealthCheck" }, { "name": "retry", @@ -41570,21 +45878,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "insert" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_public_delegated_prefixes_patch_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_composite_health_checks_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Patch_sync", + "regionTag": 
"compute_v1_generated_RegionCompositeHealthChecks_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -41594,43 +45902,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_patch_sync.py" + "title": "compute_v1_generated_region_composite_health_checks_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient", - "shortName": "PublicDelegatedPrefixesClient" + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.PublicDelegatedPrefixesClient.withdraw", + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.list", "method": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes.Withdraw", + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.List", "service": { - "fullName": "google.cloud.compute.v1.PublicDelegatedPrefixes", - "shortName": "PublicDelegatedPrefixes" + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "shortName": "RegionCompositeHealthChecks" }, - "shortName": "Withdraw" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.WithdrawPublicDelegatedPrefixeRequest" + "type": "google.cloud.compute_v1.types.ListRegionCompositeHealthChecksRequest" }, { "name": "project", @@ -41640,10 +45948,6 @@ "name": "region", "type": "str" }, - { - "name": "public_delegated_prefix", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41657,14 
+45961,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "withdraw" + "resultType": "google.cloud.compute_v1.services.region_composite_health_checks.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Withdraw", - "file": "compute_v1_generated_public_delegated_prefixes_withdraw_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_composite_health_checks_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_PublicDelegatedPrefixes_Withdraw_sync", + "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_List_sync", "segments": [ { "end": 53, @@ -41682,43 +45986,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_public_delegated_prefixes_withdraw_sync.py" + "title": "compute_v1_generated_region_composite_health_checks_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", - "shortName": "RegionAutoscalersClient" + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.delete", + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers.Delete", + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.Patch", "service": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers", - "shortName": "RegionAutoscalers" + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "shortName": 
"RegionCompositeHealthChecks" }, - "shortName": "Delete" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionAutoscalerRequest" + "type": "google.cloud.compute_v1.types.PatchRegionCompositeHealthCheckRequest" }, { "name": "project", @@ -41729,9 +46033,13 @@ "type": "str" }, { - "name": "autoscaler", + "name": "composite_health_check", "type": "str" }, + { + "name": "composite_health_check_resource", + "type": "google.cloud.compute_v1.types.CompositeHealthCheck" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41746,13 +46054,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "patch" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_autoscalers_delete_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_composite_health_checks_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionAutoscalers_Delete_sync", + "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_Patch_sync", "segments": [ { "end": 53, @@ -41785,28 +46093,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_autoscalers_delete_sync.py" + "title": "compute_v1_generated_region_composite_health_checks_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", - "shortName": "RegionAutoscalersClient" + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", + "shortName": "RegionCompositeHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.get", + "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers.Get", + "fullName": 
"google.cloud.compute.v1.RegionCompositeHealthChecks.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers", - "shortName": "RegionAutoscalers" + "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", + "shortName": "RegionCompositeHealthChecks" }, - "shortName": "Get" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionAutoscalerRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionCompositeHealthCheckRequest" }, { "name": "project", @@ -41817,9 +46125,13 @@ "type": "str" }, { - "name": "autoscaler", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -41833,14 +46145,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Autoscaler", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_autoscalers_get_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_composite_health_checks_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionAutoscalers_Get_sync", + "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -41873,28 +46185,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_autoscalers_get_sync.py" + "title": "compute_v1_generated_region_composite_health_checks_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", - "shortName": "RegionAutoscalersClient" 
+ "fullName": "google.cloud.compute_v1.RegionDiskTypesClient", + "shortName": "RegionDiskTypesClient" }, - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.insert", + "fullName": "google.cloud.compute_v1.RegionDiskTypesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers.Insert", + "fullName": "google.cloud.compute.v1.RegionDiskTypes.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers", - "shortName": "RegionAutoscalers" + "fullName": "google.cloud.compute.v1.RegionDiskTypes", + "shortName": "RegionDiskTypes" }, - "shortName": "Insert" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionAutoscalerRequest" + "type": "google.cloud.compute_v1.types.GetRegionDiskTypeRequest" }, { "name": "project", @@ -41905,8 +46217,8 @@ "type": "str" }, { - "name": "autoscaler_resource", - "type": "google.cloud.compute_v1.types.Autoscaler" + "name": "disk_type", + "type": "str" }, { "name": "retry", @@ -41921,22 +46233,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.DiskType", + "shortName": "get" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_autoscalers_insert_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_disk_types_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionAutoscalers_Insert_sync", + "regionTag": "compute_v1_generated_RegionDiskTypes_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -41946,43 +46258,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, 
"type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_autoscalers_insert_sync.py" + "title": "compute_v1_generated_region_disk_types_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", - "shortName": "RegionAutoscalersClient" + "fullName": "google.cloud.compute_v1.RegionDiskTypesClient", + "shortName": "RegionDiskTypesClient" }, - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.list", + "fullName": "google.cloud.compute_v1.RegionDiskTypesClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers.List", + "fullName": "google.cloud.compute.v1.RegionDiskTypes.List", "service": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers", - "shortName": "RegionAutoscalers" + "fullName": "google.cloud.compute.v1.RegionDiskTypes", + "shortName": "RegionDiskTypes" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionAutoscalersRequest" + "type": "google.cloud.compute_v1.types.ListRegionDiskTypesRequest" }, { "name": "project", @@ -42005,14 +46317,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_autoscalers.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.region_disk_types.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": "compute_v1_generated_region_autoscalers_list_sync.py", + "file": "compute_v1_generated_region_disk_types_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionAutoscalers_List_sync", + "regionTag": "compute_v1_generated_RegionDiskTypes_List_sync", "segments": [ { "end": 53, @@ -42045,116 +46357,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_autoscalers_list_sync.py" - 
}, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", - "shortName": "RegionAutoscalersClient" - }, - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.patch", - "method": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers.Patch", - "service": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers", - "shortName": "RegionAutoscalers" - }, - "shortName": "Patch" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionAutoscalerRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "autoscaler_resource", - "type": "google.cloud.compute_v1.types.Autoscaler" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" - }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_autoscalers_patch_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionAutoscalers_Patch_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_region_autoscalers_patch_sync.py" + "title": "compute_v1_generated_region_disk_types_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", - "shortName": "RegionAutoscalersClient" + 
"fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionDisksClient.add_resource_policies", "method": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionDisks.AddResourcePolicies", "service": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers", - "shortName": "RegionAutoscalers" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "TestIamPermissions" + "shortName": "AddResourcePolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionAutoscalerRequest" + "type": "google.cloud.compute_v1.types.AddResourcePoliciesRegionDiskRequest" }, { "name": "project", @@ -42165,12 +46389,12 @@ "type": "str" }, { - "name": "resource", + "name": "disk", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "region_disks_add_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.RegionDisksAddResourcePoliciesRequest" }, { "name": "retry", @@ -42185,14 +46409,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_resource_policies" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_autoscalers_test_iam_permissions_sync.py", + "description": "Sample for AddResourcePolicies", + "file": "compute_v1_generated_region_disks_add_resource_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionAutoscalers_TestIamPermissions_sync", + 
"regionTag": "compute_v1_generated_RegionDisks_AddResourcePolicies_sync", "segments": [ { "end": 53, @@ -42225,28 +46449,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_autoscalers_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_disks_add_resource_policies_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient", - "shortName": "RegionAutoscalersClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionAutoscalersClient.update", + "fullName": "google.cloud.compute_v1.RegionDisksClient.bulk_insert", "method": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers.Update", + "fullName": "google.cloud.compute.v1.RegionDisks.BulkInsert", "service": { - "fullName": "google.cloud.compute.v1.RegionAutoscalers", - "shortName": "RegionAutoscalers" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "Update" + "shortName": "BulkInsert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateRegionAutoscalerRequest" + "type": "google.cloud.compute_v1.types.BulkInsertRegionDiskRequest" }, { "name": "project", @@ -42257,8 +46481,8 @@ "type": "str" }, { - "name": "autoscaler_resource", - "type": "google.cloud.compute_v1.types.Autoscaler" + "name": "bulk_insert_disk_resource_resource", + "type": "google.cloud.compute_v1.types.BulkInsertDiskResource" }, { "name": "retry", @@ -42274,13 +46498,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update" + "shortName": "bulk_insert" }, - "description": "Sample for Update", - "file": "compute_v1_generated_region_autoscalers_update_sync.py", + "description": "Sample for BulkInsert", + "file": "compute_v1_generated_region_disks_bulk_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "compute_v1_generated_RegionAutoscalers_Update_sync", + "regionTag": "compute_v1_generated_RegionDisks_BulkInsert_sync", "segments": [ { "end": 52, @@ -42313,28 +46537,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_autoscalers_update_sync.py" + "title": "compute_v1_generated_region_disks_bulk_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.delete", + "fullName": "google.cloud.compute_v1.RegionDisksClient.create_snapshot", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.Delete", + "fullName": "google.cloud.compute.v1.RegionDisks.CreateSnapshot", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "Delete" + "shortName": "CreateSnapshot" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.CreateSnapshotRegionDiskRequest" }, { "name": "project", @@ -42345,9 +46569,13 @@ "type": "str" }, { - "name": "backend_service", + "name": "disk", "type": "str" }, + { + "name": "snapshot_resource", + "type": "google.cloud.compute_v1.types.Snapshot" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -42362,13 +46590,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "create_snapshot" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_backend_services_delete_sync.py", + "description": "Sample for CreateSnapshot", + "file": 
"compute_v1_generated_region_disks_create_snapshot_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_Delete_sync", + "regionTag": "compute_v1_generated_RegionDisks_CreateSnapshot_sync", "segments": [ { "end": 53, @@ -42401,28 +46629,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_delete_sync.py" + "title": "compute_v1_generated_region_disks_create_snapshot_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get_health", + "fullName": "google.cloud.compute_v1.RegionDisksClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.GetHealth", + "fullName": "google.cloud.compute.v1.RegionDisks.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "GetHealth" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetHealthRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionDiskRequest" }, { "name": "project", @@ -42433,13 +46661,9 @@ "type": "str" }, { - "name": "backend_service", + "name": "disk", "type": "str" }, - { - "name": "resource_group_reference_resource", - "type": "google.cloud.compute_v1.types.ResourceGroupReference" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -42453,14 +46677,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.BackendServiceGroupHealth", - "shortName": "get_health" + "resultType": 
"google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for GetHealth", - "file": "compute_v1_generated_region_backend_services_get_health_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_disks_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_GetHealth_sync", + "regionTag": "compute_v1_generated_RegionDisks_Delete_sync", "segments": [ { "end": 53, @@ -42493,28 +46717,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_get_health_sync.py" + "title": "compute_v1_generated_region_disks_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.RegionDisksClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.GetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionDisks.GetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionDiskRequest" }, { "name": "project", @@ -42545,10 +46769,10 @@ "shortName": "get_iam_policy" }, "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py", + "file": "compute_v1_generated_region_disks_get_iam_policy_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionDisks_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -42581,28 +46805,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_get_iam_policy_sync.py" + "title": "compute_v1_generated_region_disks_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.get", + "fullName": "google.cloud.compute_v1.RegionDisksClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.Get", + "fullName": "google.cloud.compute.v1.RegionDisks.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.GetRegionDiskRequest" }, { "name": "project", @@ -42613,7 +46837,7 @@ "type": "str" }, { - "name": "backend_service", + "name": "disk", "type": "str" }, { @@ -42629,14 +46853,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.BackendService", + "resultType": "google.cloud.compute_v1.types.Disk", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_region_backend_services_get_sync.py", + "file": "compute_v1_generated_region_disks_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_RegionBackendServices_Get_sync", + "regionTag": "compute_v1_generated_RegionDisks_Get_sync", "segments": [ { "end": 53, @@ -42669,28 +46893,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_get_sync.py" + "title": "compute_v1_generated_region_disks_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.insert", + "fullName": "google.cloud.compute_v1.RegionDisksClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.Insert", + "fullName": "google.cloud.compute.v1.RegionDisks.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.InsertRegionDiskRequest" }, { "name": "project", @@ -42701,8 +46925,8 @@ "type": "str" }, { - "name": "backend_service_resource", - "type": "google.cloud.compute_v1.types.BackendService" + "name": "disk_resource", + "type": "google.cloud.compute_v1.types.Disk" }, { "name": "retry", @@ -42721,10 +46945,10 @@ "shortName": "insert" }, "description": "Sample for Insert", - "file": "compute_v1_generated_region_backend_services_insert_sync.py", + "file": "compute_v1_generated_region_disks_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_Insert_sync", + "regionTag": "compute_v1_generated_RegionDisks_Insert_sync", "segments": [ { "end": 52, @@ -42757,28 +46981,28 
@@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_insert_sync.py" + "title": "compute_v1_generated_region_disks_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.list_usable", + "fullName": "google.cloud.compute_v1.RegionDisksClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.ListUsable", + "fullName": "google.cloud.compute.v1.RegionDisks.List", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "ListUsable" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListUsableRegionBackendServicesRequest" + "type": "google.cloud.compute_v1.types.ListRegionDisksRequest" }, { "name": "project", @@ -42801,14 +47025,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_backend_services.pagers.ListUsablePager", - "shortName": "list_usable" + "resultType": "google.cloud.compute_v1.services.region_disks.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for ListUsable", - "file": "compute_v1_generated_region_backend_services_list_usable_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_disks_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_ListUsable_sync", + "regionTag": "compute_v1_generated_RegionDisks_List_sync", "segments": [ { "end": 53, @@ -42841,28 +47065,28 @@ "type": "RESPONSE_HANDLING" } ], - 
"title": "compute_v1_generated_region_backend_services_list_usable_sync.py" + "title": "compute_v1_generated_region_disks_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.list", + "fullName": "google.cloud.compute_v1.RegionDisksClient.remove_resource_policies", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.List", + "fullName": "google.cloud.compute.v1.RegionDisks.RemoveResourcePolicies", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "List" + "shortName": "RemoveResourcePolicies" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionBackendServicesRequest" + "type": "google.cloud.compute_v1.types.RemoveResourcePoliciesRegionDiskRequest" }, { "name": "project", @@ -42872,6 +47096,14 @@ "name": "region", "type": "str" }, + { + "name": "disk", + "type": "str" + }, + { + "name": "region_disks_remove_resource_policies_request_resource", + "type": "google.cloud.compute_v1.types.RegionDisksRemoveResourcePoliciesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -42885,14 +47117,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_backend_services.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_resource_policies" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_backend_services_list_sync.py", + "description": "Sample for RemoveResourcePolicies", + 
"file": "compute_v1_generated_region_disks_remove_resource_policies_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_List_sync", + "regionTag": "compute_v1_generated_RegionDisks_RemoveResourcePolicies_sync", "segments": [ { "end": 53, @@ -42910,43 +47142,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_list_sync.py" + "title": "compute_v1_generated_region_disks_remove_resource_policies_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.patch", + "fullName": "google.cloud.compute_v1.RegionDisksClient.resize", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.Patch", + "fullName": "google.cloud.compute.v1.RegionDisks.Resize", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "Patch" + "shortName": "Resize" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.ResizeRegionDiskRequest" }, { "name": "project", @@ -42957,12 +47189,12 @@ "type": "str" }, { - "name": "backend_service", + "name": "disk", "type": "str" }, { - "name": "backend_service_resource", - "type": "google.cloud.compute_v1.types.BackendService" + "name": 
"region_disks_resize_request_resource", + "type": "google.cloud.compute_v1.types.RegionDisksResizeRequest" }, { "name": "retry", @@ -42978,13 +47210,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "resize" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_backend_services_patch_sync.py", + "description": "Sample for Resize", + "file": "compute_v1_generated_region_disks_resize_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_Patch_sync", + "regionTag": "compute_v1_generated_RegionDisks_Resize_sync", "segments": [ { "end": 53, @@ -43017,28 +47249,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_patch_sync.py" + "title": "compute_v1_generated_region_disks_resize_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.RegionDisksClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.SetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionDisks.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionDiskRequest" }, { "name": "project", @@ -43073,10 +47305,10 @@ "shortName": "set_iam_policy" }, "description": "Sample 
for SetIamPolicy", - "file": "compute_v1_generated_region_backend_services_set_iam_policy_sync.py", + "file": "compute_v1_generated_region_disks_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionDisks_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -43109,28 +47341,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_set_iam_policy_sync.py" + "title": "compute_v1_generated_region_disks_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.set_security_policy", + "fullName": "google.cloud.compute_v1.RegionDisksClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.SetSecurityPolicy", + "fullName": "google.cloud.compute.v1.RegionDisks.SetLabels", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "SetSecurityPolicy" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetSecurityPolicyRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.SetLabelsRegionDiskRequest" }, { "name": "project", @@ -43141,12 +47373,12 @@ "type": "str" }, { - "name": "backend_service", + "name": "resource", "type": "str" }, { - "name": "security_policy_reference_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicyReference" + "name": "region_set_labels_request_resource", + "type": 
"google.cloud.compute_v1.types.RegionSetLabelsRequest" }, { "name": "retry", @@ -43162,13 +47394,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_security_policy" + "shortName": "set_labels" }, - "description": "Sample for SetSecurityPolicy", - "file": "compute_v1_generated_region_backend_services_set_security_policy_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_region_disks_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_SetSecurityPolicy_sync", + "regionTag": "compute_v1_generated_RegionDisks_SetLabels_sync", "segments": [ { "end": 53, @@ -43201,28 +47433,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_set_security_policy_sync.py" + "title": "compute_v1_generated_region_disks_set_labels_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionDisksClient.start_async_replication", "method": { - "fullName": "google.cloud.compute.v1.RegionBackendServices.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionDisks.StartAsyncReplication", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "TestIamPermissions" + "shortName": "StartAsyncReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionBackendServiceRequest" + "type": 
"google.cloud.compute_v1.types.StartAsyncReplicationRegionDiskRequest" }, { "name": "project", @@ -43233,12 +47465,12 @@ "type": "str" }, { - "name": "resource", + "name": "disk", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "region_disks_start_async_replication_request_resource", + "type": "google.cloud.compute_v1.types.RegionDisksStartAsyncReplicationRequest" }, { "name": "retry", @@ -43253,14 +47485,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "start_async_replication" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_backend_services_test_iam_permissions_sync.py", + "description": "Sample for StartAsyncReplication", + "file": "compute_v1_generated_region_disks_start_async_replication_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionDisks_StartAsyncReplication_sync", "segments": [ { "end": 53, @@ -43293,28 +47525,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_disks_start_async_replication_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient", - "shortName": "RegionBackendServicesClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionBackendServicesClient.update", + "fullName": "google.cloud.compute_v1.RegionDisksClient.stop_async_replication", "method": { - "fullName": 
"google.cloud.compute.v1.RegionBackendServices.Update", + "fullName": "google.cloud.compute.v1.RegionDisks.StopAsyncReplication", "service": { - "fullName": "google.cloud.compute.v1.RegionBackendServices", - "shortName": "RegionBackendServices" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "Update" + "shortName": "StopAsyncReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateRegionBackendServiceRequest" + "type": "google.cloud.compute_v1.types.StopAsyncReplicationRegionDiskRequest" }, { "name": "project", @@ -43325,13 +47557,9 @@ "type": "str" }, { - "name": "backend_service", + "name": "disk", "type": "str" }, - { - "name": "backend_service_resource", - "type": "google.cloud.compute_v1.types.BackendService" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -43346,13 +47574,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update" + "shortName": "stop_async_replication" }, - "description": "Sample for Update", - "file": "compute_v1_generated_region_backend_services_update_sync.py", + "description": "Sample for StopAsyncReplication", + "file": "compute_v1_generated_region_disks_stop_async_replication_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionBackendServices_Update_sync", + "regionTag": "compute_v1_generated_RegionDisks_StopAsyncReplication_sync", "segments": [ { "end": 53, @@ -43385,33 +47613,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_backend_services_update_sync.py" + "title": "compute_v1_generated_region_disks_stop_async_replication_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", - "shortName": "RegionCommitmentsClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": 
"google.cloud.compute_v1.RegionCommitmentsClient.aggregated_list", + "fullName": "google.cloud.compute_v1.RegionDisksClient.stop_group_async_replication", "method": { - "fullName": "google.cloud.compute.v1.RegionCommitments.AggregatedList", + "fullName": "google.cloud.compute.v1.RegionDisks.StopGroupAsyncReplication", "service": { - "fullName": "google.cloud.compute.v1.RegionCommitments", - "shortName": "RegionCommitments" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "AggregatedList" + "shortName": "StopGroupAsyncReplication" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListRegionCommitmentsRequest" + "type": "google.cloud.compute_v1.types.StopGroupAsyncReplicationRegionDiskRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": "disks_stop_group_async_replication_resource_resource", + "type": "google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -43425,14 +47661,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_commitments.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "stop_group_async_replication" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_region_commitments_aggregated_list_sync.py", + "description": "Sample for StopGroupAsyncReplication", + "file": "compute_v1_generated_region_disks_stop_group_async_replication_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCommitments_AggregatedList_sync", + "regionTag": "compute_v1_generated_RegionDisks_StopGroupAsyncReplication_sync", "segments": [ { "end": 52, @@ -43450,43 +47686,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 
45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 49, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_commitments_aggregated_list_sync.py" + "title": "compute_v1_generated_region_disks_stop_group_async_replication_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", - "shortName": "RegionCommitmentsClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.get", + "fullName": "google.cloud.compute_v1.RegionDisksClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionCommitments.Get", + "fullName": "google.cloud.compute.v1.RegionDisks.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionCommitments", - "shortName": "RegionCommitments" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "Get" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionCommitmentRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionDiskRequest" }, { "name": "project", @@ -43497,9 +47733,13 @@ "type": "str" }, { - "name": "commitment", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -43513,14 +47753,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Commitment", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for 
Get", - "file": "compute_v1_generated_region_commitments_get_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_disks_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCommitments_Get_sync", + "regionTag": "compute_v1_generated_RegionDisks_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -43553,28 +47793,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_commitments_get_sync.py" + "title": "compute_v1_generated_region_disks_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", - "shortName": "RegionCommitmentsClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.insert", + "fullName": "google.cloud.compute_v1.RegionDisksClient.update_kms_key", "method": { - "fullName": "google.cloud.compute.v1.RegionCommitments.Insert", + "fullName": "google.cloud.compute.v1.RegionDisks.UpdateKmsKey", "service": { - "fullName": "google.cloud.compute.v1.RegionCommitments", - "shortName": "RegionCommitments" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, - "shortName": "Insert" + "shortName": "UpdateKmsKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionCommitmentRequest" + "type": "google.cloud.compute_v1.types.UpdateKmsKeyRegionDiskRequest" }, { "name": "project", @@ -43585,92 +47825,12 @@ "type": "str" }, { - "name": "commitment_resource", - "type": "google.cloud.compute_v1.types.Commitment" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": 
"google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" - }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_commitments_insert_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCommitments_Insert_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_region_commitments_insert_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", - "shortName": "RegionCommitmentsClient" - }, - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.list", - "method": { - "fullName": "google.cloud.compute.v1.RegionCommitments.List", - "service": { - "fullName": "google.cloud.compute.v1.RegionCommitments", - "shortName": "RegionCommitments" - }, - "shortName": "List" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionCommitmentsRequest" - }, - { - "name": "project", + "name": "disk", "type": "str" }, { - "name": "region", - "type": "str" + "name": "region_disk_update_kms_key_request_resource", + "type": "google.cloud.compute_v1.types.RegionDiskUpdateKmsKeyRequest" }, { "name": "retry", @@ -43685,14 +47845,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_commitments.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_kms_key" }, - "description": "Sample for List", - "file": 
"compute_v1_generated_region_commitments_list_sync.py", + "description": "Sample for UpdateKmsKey", + "file": "compute_v1_generated_region_disks_update_kms_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCommitments_List_sync", + "regionTag": "compute_v1_generated_RegionDisks_UpdateKmsKey_sync", "segments": [ { "end": 53, @@ -43710,43 +47870,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_commitments_list_sync.py" + "title": "compute_v1_generated_region_disks_update_kms_key_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient", - "shortName": "RegionCommitmentsClient" + "fullName": "google.cloud.compute_v1.RegionDisksClient", + "shortName": "RegionDisksClient" }, - "fullName": "google.cloud.compute_v1.RegionCommitmentsClient.update", + "fullName": "google.cloud.compute_v1.RegionDisksClient.update", "method": { - "fullName": "google.cloud.compute.v1.RegionCommitments.Update", + "fullName": "google.cloud.compute.v1.RegionDisks.Update", "service": { - "fullName": "google.cloud.compute.v1.RegionCommitments", - "shortName": "RegionCommitments" + "fullName": "google.cloud.compute.v1.RegionDisks", + "shortName": "RegionDisks" }, "shortName": "Update" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateRegionCommitmentRequest" + "type": "google.cloud.compute_v1.types.UpdateRegionDiskRequest" }, { "name": "project", @@ -43757,12 +47917,12 @@ "type": "str" }, { - "name": "commitment", + "name": "disk", "type": "str" }, { - "name": "commitment_resource", - "type": "google.cloud.compute_v1.types.Commitment" + "name": "disk_resource", + "type": 
"google.cloud.compute_v1.types.Disk" }, { "name": "retry", @@ -43781,10 +47941,10 @@ "shortName": "update" }, "description": "Sample for Update", - "file": "compute_v1_generated_region_commitments_update_sync.py", + "file": "compute_v1_generated_region_disks_update_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCommitments_Update_sync", + "regionTag": "compute_v1_generated_RegionDisks_Update_sync", "segments": [ { "end": 53, @@ -43817,28 +47977,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_commitments_update_sync.py" + "title": "compute_v1_generated_region_disks_update_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", - "shortName": "RegionCompositeHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", + "shortName": "RegionHealthAggregationPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.aggregated_list", + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.AggregatedList", + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", - "shortName": "RegionCompositeHealthChecks" + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", + "shortName": "RegionHealthAggregationPolicies" }, "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListRegionCompositeHealthChecksRequest" + "type": "google.cloud.compute_v1.types.AggregatedListRegionHealthAggregationPoliciesRequest" }, { "name": "project", @@ -43857,14 +48017,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.services.region_composite_health_checks.pagers.AggregatedListPager", + "resultType": "google.cloud.compute_v1.services.region_health_aggregation_policies.pagers.AggregatedListPager", "shortName": "aggregated_list" }, "description": "Sample for AggregatedList", - "file": "compute_v1_generated_region_composite_health_checks_aggregated_list_sync.py", + "file": "compute_v1_generated_region_health_aggregation_policies_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_AggregatedList_sync", + "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_AggregatedList_sync", "segments": [ { "end": 52, @@ -43897,28 +48057,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_composite_health_checks_aggregated_list_sync.py" + "title": "compute_v1_generated_region_health_aggregation_policies_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", - "shortName": "RegionCompositeHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", + "shortName": "RegionHealthAggregationPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.delete", + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.Delete", + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", - "shortName": "RegionCompositeHealthChecks" + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", + "shortName": "RegionHealthAggregationPolicies" }, "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.DeleteRegionCompositeHealthCheckRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionHealthAggregationPolicyRequest" }, { "name": "project", @@ -43929,7 +48089,7 @@ "type": "str" }, { - "name": "composite_health_check", + "name": "health_aggregation_policy", "type": "str" }, { @@ -43949,10 +48109,10 @@ "shortName": "delete" }, "description": "Sample for Delete", - "file": "compute_v1_generated_region_composite_health_checks_delete_sync.py", + "file": "compute_v1_generated_region_health_aggregation_policies_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_Delete_sync", + "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_Delete_sync", "segments": [ { "end": 53, @@ -43985,28 +48145,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_composite_health_checks_delete_sync.py" + "title": "compute_v1_generated_region_health_aggregation_policies_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", - "shortName": "RegionCompositeHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", + "shortName": "RegionHealthAggregationPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.get", + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.Get", + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", - "shortName": "RegionCompositeHealthChecks" + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", + "shortName": "RegionHealthAggregationPolicies" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.GetRegionCompositeHealthCheckRequest" + "type": "google.cloud.compute_v1.types.GetRegionHealthAggregationPolicyRequest" }, { "name": "project", @@ -44017,7 +48177,7 @@ "type": "str" }, { - "name": "composite_health_check", + "name": "health_aggregation_policy", "type": "str" }, { @@ -44033,14 +48193,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.CompositeHealthCheck", + "resultType": "google.cloud.compute_v1.types.HealthAggregationPolicy", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_region_composite_health_checks_get_sync.py", + "file": "compute_v1_generated_region_health_aggregation_policies_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_Get_sync", + "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_Get_sync", "segments": [ { "end": 53, @@ -44073,28 +48233,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_composite_health_checks_get_sync.py" + "title": "compute_v1_generated_region_health_aggregation_policies_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", - "shortName": "RegionCompositeHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", + "shortName": "RegionHealthAggregationPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.insert", + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.Insert", + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", - "shortName": "RegionCompositeHealthChecks" + "fullName": 
"google.cloud.compute.v1.RegionHealthAggregationPolicies", + "shortName": "RegionHealthAggregationPolicies" }, "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionCompositeHealthCheckRequest" + "type": "google.cloud.compute_v1.types.InsertRegionHealthAggregationPolicyRequest" }, { "name": "project", @@ -44105,8 +48265,8 @@ "type": "str" }, { - "name": "composite_health_check_resource", - "type": "google.cloud.compute_v1.types.CompositeHealthCheck" + "name": "health_aggregation_policy_resource", + "type": "google.cloud.compute_v1.types.HealthAggregationPolicy" }, { "name": "retry", @@ -44125,10 +48285,10 @@ "shortName": "insert" }, "description": "Sample for Insert", - "file": "compute_v1_generated_region_composite_health_checks_insert_sync.py", + "file": "compute_v1_generated_region_health_aggregation_policies_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_Insert_sync", + "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_Insert_sync", "segments": [ { "end": 52, @@ -44161,28 +48321,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_composite_health_checks_insert_sync.py" + "title": "compute_v1_generated_region_health_aggregation_policies_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", - "shortName": "RegionCompositeHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", + "shortName": "RegionHealthAggregationPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.list", + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.List", + "fullName": 
"google.cloud.compute.v1.RegionHealthAggregationPolicies.List", "service": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", - "shortName": "RegionCompositeHealthChecks" + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", + "shortName": "RegionHealthAggregationPolicies" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionCompositeHealthChecksRequest" + "type": "google.cloud.compute_v1.types.ListRegionHealthAggregationPoliciesRequest" }, { "name": "project", @@ -44205,14 +48365,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_composite_health_checks.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.region_health_aggregation_policies.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": "compute_v1_generated_region_composite_health_checks_list_sync.py", + "file": "compute_v1_generated_region_health_aggregation_policies_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_List_sync", + "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_List_sync", "segments": [ { "end": 53, @@ -44245,28 +48405,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_composite_health_checks_list_sync.py" + "title": "compute_v1_generated_region_health_aggregation_policies_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", - "shortName": "RegionCompositeHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", + "shortName": "RegionHealthAggregationPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.patch", + "fullName": 
"google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.Patch", + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.Patch", "service": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", - "shortName": "RegionCompositeHealthChecks" + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", + "shortName": "RegionHealthAggregationPolicies" }, "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionCompositeHealthCheckRequest" + "type": "google.cloud.compute_v1.types.PatchRegionHealthAggregationPolicyRequest" }, { "name": "project", @@ -44277,12 +48437,12 @@ "type": "str" }, { - "name": "composite_health_check", + "name": "health_aggregation_policy", "type": "str" }, { - "name": "composite_health_check_resource", - "type": "google.cloud.compute_v1.types.CompositeHealthCheck" + "name": "health_aggregation_policy_resource", + "type": "google.cloud.compute_v1.types.HealthAggregationPolicy" }, { "name": "retry", @@ -44301,10 +48461,102 @@ "shortName": "patch" }, "description": "Sample for Patch", - "file": "compute_v1_generated_region_composite_health_checks_patch_sync.py", + "file": "compute_v1_generated_region_health_aggregation_policies_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_Patch_sync", + "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_Patch_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"compute_v1_generated_region_health_aggregation_policies_patch_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", + "shortName": "RegionHealthAggregationPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", + "shortName": "RegionHealthAggregationPolicies" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionHealthAggregationPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_health_aggregation_policies_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -44337,45 +48589,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_composite_health_checks_patch_sync.py" + "title": "compute_v1_generated_region_health_aggregation_policies_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": 
{ - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient", - "shortName": "RegionCompositeHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionCompositeHealthChecksClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.RegionCompositeHealthChecks", - "shortName": "RegionCompositeHealthChecks" + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" }, - "shortName": "TestIamPermissions" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionCompositeHealthCheckRequest" + "type": "google.cloud.compute_v1.types.AggregatedListRegionHealthCheckServicesRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, - { - "name": "resource", - "type": "str" - }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -44389,22 +48629,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.services.region_health_check_services.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_composite_health_checks_test_iam_permissions_sync.py", + "description": "Sample for AggregatedList", + "file": 
"compute_v1_generated_region_health_check_services_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionCompositeHealthChecks_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -44414,43 +48654,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_composite_health_checks_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_health_check_services_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDiskTypesClient", - "shortName": "RegionDiskTypesClient" + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionDiskTypesClient.get", + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionDiskTypes.Get", + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionDiskTypes", - "shortName": "RegionDiskTypes" + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" }, - "shortName": "Get" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionDiskTypeRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionHealthCheckServiceRequest" }, { "name": "project", @@ -44461,7 
+48701,7 @@ "type": "str" }, { - "name": "disk_type", + "name": "health_check_service", "type": "str" }, { @@ -44477,14 +48717,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.DiskType", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_disk_types_get_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_health_check_services_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDiskTypes_Get_sync", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_Delete_sync", "segments": [ { "end": 53, @@ -44517,28 +48757,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disk_types_get_sync.py" + "title": "compute_v1_generated_region_health_check_services_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDiskTypesClient", - "shortName": "RegionDiskTypesClient" + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionDiskTypesClient.list", + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionDiskTypes.List", + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionDiskTypes", - "shortName": "RegionDiskTypes" + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" }, - "shortName": "List" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionDiskTypesRequest" + "type": 
"google.cloud.compute_v1.types.GetRegionHealthCheckServiceRequest" }, { "name": "project", @@ -44548,6 +48788,10 @@ "name": "region", "type": "str" }, + { + "name": "health_check_service", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -44561,14 +48805,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_disk_types.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.types.HealthCheckService", + "shortName": "get" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_disk_types_list_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_health_check_services_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDiskTypes_List_sync", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_Get_sync", "segments": [ { "end": 53, @@ -44586,43 +48830,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disk_types_list_sync.py" + "title": "compute_v1_generated_region_health_check_services_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.add_resource_policies", + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.AddResourcePolicies", + "fullName": 
"google.cloud.compute.v1.RegionHealthCheckServices.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" }, - "shortName": "AddResourcePolicies" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddResourcePoliciesRegionDiskRequest" + "type": "google.cloud.compute_v1.types.InsertRegionHealthCheckServiceRequest" }, { "name": "project", @@ -44633,12 +48877,8 @@ "type": "str" }, { - "name": "disk", - "type": "str" - }, - { - "name": "region_disks_add_resource_policies_request_resource", - "type": "google.cloud.compute_v1.types.RegionDisksAddResourcePoliciesRequest" + "name": "health_check_service_resource", + "type": "google.cloud.compute_v1.types.HealthCheckService" }, { "name": "retry", @@ -44654,21 +48894,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_resource_policies" + "shortName": "insert" }, - "description": "Sample for AddResourcePolicies", - "file": "compute_v1_generated_region_disks_add_resource_policies_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_health_check_services_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_AddResourcePolicies_sync", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -44678,43 +48918,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_region_disks_add_resource_policies_sync.py" + "title": "compute_v1_generated_region_health_check_services_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.bulk_insert", + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.BulkInsert", + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.List", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" }, - "shortName": "BulkInsert" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.BulkInsertRegionDiskRequest" + "type": "google.cloud.compute_v1.types.ListRegionHealthCheckServicesRequest" }, { "name": "project", @@ -44724,10 +48964,6 @@ "name": "region", "type": "str" }, - { - "name": "bulk_insert_disk_resource_resource", - "type": "google.cloud.compute_v1.types.BulkInsertDiskResource" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -44741,22 +48977,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "bulk_insert" + "resultType": "google.cloud.compute_v1.services.region_health_check_services.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for BulkInsert", - "file": "compute_v1_generated_region_disks_bulk_insert_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_health_check_services_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", 
- "regionTag": "compute_v1_generated_RegionDisks_BulkInsert_sync", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -44776,33 +49012,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_bulk_insert_sync.py" + "title": "compute_v1_generated_region_health_check_services_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.create_snapshot", + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.CreateSnapshot", + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Patch", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" }, - "shortName": "CreateSnapshot" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.CreateSnapshotRegionDiskRequest" + "type": "google.cloud.compute_v1.types.PatchRegionHealthCheckServiceRequest" }, { "name": "project", @@ -44813,12 +49049,12 @@ "type": "str" }, { - "name": "disk", + "name": "health_check_service", "type": "str" }, { - "name": "snapshot_resource", - "type": "google.cloud.compute_v1.types.Snapshot" + "name": "health_check_service_resource", + "type": "google.cloud.compute_v1.types.HealthCheckService" }, { "name": "retry", @@ -44834,13 +49070,13 @@ } ], "resultType": 
"google.api_core.extended_operation.ExtendedOperation", - "shortName": "create_snapshot" + "shortName": "patch" }, - "description": "Sample for CreateSnapshot", - "file": "compute_v1_generated_region_disks_create_snapshot_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_health_check_services_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_CreateSnapshot_sync", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_Patch_sync", "segments": [ { "end": 53, @@ -44873,28 +49109,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_create_snapshot_sync.py" + "title": "compute_v1_generated_region_health_check_services_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", + "shortName": "RegionHealthCheckServicesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.delete", + "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.Delete", + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", + "shortName": "RegionHealthCheckServices" }, - "shortName": "Delete" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionDiskRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionHealthCheckServiceRequest" }, { "name": "project", @@ -44905,9 +49141,13 @@ "type": "str" }, { - "name": "disk", + "name": "resource", "type": "str" }, + { + "name": 
"test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -44921,14 +49161,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_disks_delete_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_health_check_services_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_Delete_sync", + "regionTag": "compute_v1_generated_RegionHealthCheckServices_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -44961,28 +49201,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_delete_sync.py" + "title": "compute_v1_generated_region_health_check_services_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.GetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" }, - "shortName": "GetIamPolicy" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.GetIamPolicyRegionDiskRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionHealthCheckRequest" }, { "name": "project", @@ -44993,7 +49233,7 @@ "type": "str" }, { - "name": "resource", + "name": "health_check", "type": "str" }, { @@ -45009,14 +49249,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_region_disks_get_iam_policy_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_health_checks_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionHealthChecks_Delete_sync", "segments": [ { "end": 53, @@ -45049,28 +49289,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_get_iam_policy_sync.py" + "title": "compute_v1_generated_region_health_checks_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.get", + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.Get", + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.GetRegionDiskRequest" + "type": "google.cloud.compute_v1.types.GetRegionHealthCheckRequest" }, { "name": "project", @@ -45081,7 +49321,7 @@ "type": "str" }, { - "name": "disk", + "name": "health_check", "type": "str" }, { @@ -45097,14 +49337,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Disk", + "resultType": "google.cloud.compute_v1.types.HealthCheck", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_region_disks_get_sync.py", + "file": "compute_v1_generated_region_health_checks_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_Get_sync", + "regionTag": "compute_v1_generated_RegionHealthChecks_Get_sync", "segments": [ { "end": 53, @@ -45137,28 +49377,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_get_sync.py" + "title": "compute_v1_generated_region_health_checks_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.insert", + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.Insert", + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" }, "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionDiskRequest" + "type": "google.cloud.compute_v1.types.InsertRegionHealthCheckRequest" }, { "name": "project", @@ -45169,8 +49409,8 @@ 
"type": "str" }, { - "name": "disk_resource", - "type": "google.cloud.compute_v1.types.Disk" + "name": "health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" }, { "name": "retry", @@ -45189,10 +49429,10 @@ "shortName": "insert" }, "description": "Sample for Insert", - "file": "compute_v1_generated_region_disks_insert_sync.py", + "file": "compute_v1_generated_region_health_checks_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_Insert_sync", + "regionTag": "compute_v1_generated_RegionHealthChecks_Insert_sync", "segments": [ { "end": 52, @@ -45225,28 +49465,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_insert_sync.py" + "title": "compute_v1_generated_region_health_checks_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.list", + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.List", + "fullName": "google.cloud.compute.v1.RegionHealthChecks.List", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionDisksRequest" + "type": "google.cloud.compute_v1.types.ListRegionHealthChecksRequest" }, { "name": "project", @@ -45269,14 +49509,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_disks.pagers.ListPager", + "resultType": 
"google.cloud.compute_v1.services.region_health_checks.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": "compute_v1_generated_region_disks_list_sync.py", + "file": "compute_v1_generated_region_health_checks_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_List_sync", + "regionTag": "compute_v1_generated_RegionHealthChecks_List_sync", "segments": [ { "end": 53, @@ -45309,120 +49549,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_list_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" - }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.remove_resource_policies", - "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.RemoveResourcePolicies", - "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" - }, - "shortName": "RemoveResourcePolicies" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.RemoveResourcePoliciesRegionDiskRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "disk", - "type": "str" - }, - { - "name": "region_disks_remove_resource_policies_request_resource", - "type": "google.cloud.compute_v1.types.RegionDisksRemoveResourcePoliciesRequest" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_resource_policies" - }, - "description": "Sample for RemoveResourcePolicies", - "file": "compute_v1_generated_region_disks_remove_resource_policies_sync.py", - "language": "PYTHON", - "origin": 
"API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_RemoveResourcePolicies_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 47, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 50, - "start": 48, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 51, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_region_disks_remove_resource_policies_sync.py" + "title": "compute_v1_generated_region_health_checks_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.resize", + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.Resize", + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Patch", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" }, - "shortName": "Resize" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ResizeRegionDiskRequest" + "type": "google.cloud.compute_v1.types.PatchRegionHealthCheckRequest" }, { "name": "project", @@ -45433,12 +49581,12 @@ "type": "str" }, { - "name": "disk", + "name": "health_check", "type": "str" }, { - "name": "region_disks_resize_request_resource", - "type": "google.cloud.compute_v1.types.RegionDisksResizeRequest" + "name": "health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" }, { "name": "retry", @@ -45454,13 +49602,13 @@ } ], 
"resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "resize" + "shortName": "patch" }, - "description": "Sample for Resize", - "file": "compute_v1_generated_region_disks_resize_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_health_checks_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_Resize_sync", + "regionTag": "compute_v1_generated_RegionHealthChecks_Patch_sync", "segments": [ { "end": 53, @@ -45493,28 +49641,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_resize_sync.py" + "title": "compute_v1_generated_region_health_checks_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.SetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionHealthChecks.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" }, - "shortName": "SetIamPolicy" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyRegionDiskRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionHealthCheckRequest" }, { "name": "project", @@ -45529,8 +49677,8 @@ "type": "str" }, { - "name": "region_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + "name": "test_permissions_request_resource", + "type": 
"google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -45545,14 +49693,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_region_disks_set_iam_policy_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_health_checks_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionHealthChecks_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -45585,28 +49733,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_set_iam_policy_sync.py" + "title": "compute_v1_generated_region_health_checks_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", + "shortName": "RegionHealthChecksClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.set_labels", + "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.update", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.SetLabels", + "fullName": "google.cloud.compute.v1.RegionHealthChecks.Update", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthChecks", + "shortName": "RegionHealthChecks" }, - "shortName": "SetLabels" + "shortName": "Update" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsRegionDiskRequest" + "type": 
"google.cloud.compute_v1.types.UpdateRegionHealthCheckRequest" }, { "name": "project", @@ -45617,12 +49765,12 @@ "type": "str" }, { - "name": "resource", + "name": "health_check", "type": "str" }, { - "name": "region_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + "name": "health_check_resource", + "type": "google.cloud.compute_v1.types.HealthCheck" }, { "name": "retry", @@ -45638,13 +49786,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "shortName": "update" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_region_disks_set_labels_sync.py", + "description": "Sample for Update", + "file": "compute_v1_generated_region_health_checks_update_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_SetLabels_sync", + "regionTag": "compute_v1_generated_RegionHealthChecks_Update_sync", "segments": [ { "end": 53, @@ -45677,45 +49825,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_set_labels_sync.py" + "title": "compute_v1_generated_region_health_checks_update_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.start_async_replication", + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.StartAsyncReplication", + "fullName": "google.cloud.compute.v1.RegionHealthSources.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthSources", + "shortName": "RegionHealthSources" }, - 
"shortName": "StartAsyncReplication" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.StartAsyncReplicationRegionDiskRequest" + "type": "google.cloud.compute_v1.types.AggregatedListRegionHealthSourcesRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, - { - "name": "disk", - "type": "str" - }, - { - "name": "region_disks_start_async_replication_request_resource", - "type": "google.cloud.compute_v1.types.RegionDisksStartAsyncReplicationRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -45729,22 +49865,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "start_async_replication" + "resultType": "google.cloud.compute_v1.services.region_health_sources.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for StartAsyncReplication", - "file": "compute_v1_generated_region_disks_start_async_replication_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_region_health_sources_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_StartAsyncReplication_sync", + "regionTag": "compute_v1_generated_RegionHealthSources_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -45754,43 +49890,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_start_async_replication_sync.py" + "title": 
"compute_v1_generated_region_health_sources_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.stop_async_replication", + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.StopAsyncReplication", + "fullName": "google.cloud.compute.v1.RegionHealthSources.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthSources", + "shortName": "RegionHealthSources" }, - "shortName": "StopAsyncReplication" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.StopAsyncReplicationRegionDiskRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionHealthSourceRequest" }, { "name": "project", @@ -45801,7 +49937,7 @@ "type": "str" }, { - "name": "disk", + "name": "health_source", "type": "str" }, { @@ -45818,13 +49954,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "stop_async_replication" + "shortName": "delete" }, - "description": "Sample for StopAsyncReplication", - "file": "compute_v1_generated_region_disks_stop_async_replication_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_health_sources_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_StopAsyncReplication_sync", + "regionTag": "compute_v1_generated_RegionHealthSources_Delete_sync", "segments": [ { "end": 53, @@ -45857,28 +49993,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_stop_async_replication_sync.py" + 
"title": "compute_v1_generated_region_health_sources_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.stop_group_async_replication", + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.get_health", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.StopGroupAsyncReplication", + "fullName": "google.cloud.compute.v1.RegionHealthSources.GetHealth", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthSources", + "shortName": "RegionHealthSources" }, - "shortName": "StopGroupAsyncReplication" + "shortName": "GetHealth" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.StopGroupAsyncReplicationRegionDiskRequest" + "type": "google.cloud.compute_v1.types.GetHealthRegionHealthSourceRequest" }, { "name": "project", @@ -45889,8 +50025,8 @@ "type": "str" }, { - "name": "disks_stop_group_async_replication_resource_resource", - "type": "google.cloud.compute_v1.types.DisksStopGroupAsyncReplicationResource" + "name": "health_source", + "type": "str" }, { "name": "retry", @@ -45905,22 +50041,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "stop_group_async_replication" + "resultType": "google.cloud.compute_v1.types.HealthSourceHealth", + "shortName": "get_health" }, - "description": "Sample for StopGroupAsyncReplication", - "file": "compute_v1_generated_region_disks_stop_group_async_replication_sync.py", + "description": "Sample for GetHealth", + "file": "compute_v1_generated_region_health_sources_get_health_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_StopGroupAsyncReplication_sync", + "regionTag": "compute_v1_generated_RegionHealthSources_GetHealth_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -45930,43 +50066,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_stop_group_async_replication_sync.py" + "title": "compute_v1_generated_region_health_sources_get_health_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionHealthSources.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthSources", + "shortName": "RegionHealthSources" }, - "shortName": "TestIamPermissions" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionDiskRequest" + "type": "google.cloud.compute_v1.types.GetRegionHealthSourceRequest" }, { "name": "project", @@ -45977,13 +50113,9 @@ "type": "str" }, { - "name": "resource", + "name": "health_source", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": 
"google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -45997,14 +50129,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.HealthSource", + "shortName": "get" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_disks_test_iam_permissions_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_health_sources_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionHealthSources_Get_sync", "segments": [ { "end": 53, @@ -46037,28 +50169,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_health_sources_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionDisksClient", - "shortName": "RegionDisksClient" + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" }, - "fullName": "google.cloud.compute_v1.RegionDisksClient.update", + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionDisks.Update", + "fullName": "google.cloud.compute.v1.RegionHealthSources.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionDisks", - "shortName": "RegionDisks" + "fullName": "google.cloud.compute.v1.RegionHealthSources", + "shortName": "RegionHealthSources" }, - "shortName": "Update" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateRegionDiskRequest" + "type": 
"google.cloud.compute_v1.types.InsertRegionHealthSourceRequest" }, { "name": "project", @@ -46069,12 +50201,8 @@ "type": "str" }, { - "name": "disk", - "type": "str" - }, - { - "name": "disk_resource", - "type": "google.cloud.compute_v1.types.Disk" + "name": "health_source_resource", + "type": "google.cloud.compute_v1.types.HealthSource" }, { "name": "retry", @@ -46090,21 +50218,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update" + "shortName": "insert" }, - "description": "Sample for Update", - "file": "compute_v1_generated_region_disks_update_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_health_sources_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionDisks_Update_sync", + "regionTag": "compute_v1_generated_RegionHealthSources_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -46114,48 +50242,52 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_disks_update_sync.py" + "title": "compute_v1_generated_region_health_sources_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", - "shortName": "RegionHealthAggregationPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.list", "method": { - "fullName": 
"google.cloud.compute.v1.RegionHealthAggregationPolicies.AggregatedList", + "fullName": "google.cloud.compute.v1.RegionHealthSources.List", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", - "shortName": "RegionHealthAggregationPolicies" + "fullName": "google.cloud.compute.v1.RegionHealthSources", + "shortName": "RegionHealthSources" }, - "shortName": "AggregatedList" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListRegionHealthAggregationPoliciesRequest" + "type": "google.cloud.compute_v1.types.ListRegionHealthSourcesRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -46169,22 +50301,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_health_aggregation_policies.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.services.region_health_sources.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_region_health_aggregation_policies_aggregated_list_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_health_sources_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_AggregatedList_sync", + "regionTag": "compute_v1_generated_RegionHealthSources_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -46194,43 +50326,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 50, 
"type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_aggregation_policies_aggregated_list_sync.py" + "title": "compute_v1_generated_region_health_sources_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", - "shortName": "RegionHealthAggregationPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.delete", + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.Delete", + "fullName": "google.cloud.compute.v1.RegionHealthSources.Patch", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", - "shortName": "RegionHealthAggregationPolicies" + "fullName": "google.cloud.compute.v1.RegionHealthSources", + "shortName": "RegionHealthSources" }, - "shortName": "Delete" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionHealthAggregationPolicyRequest" + "type": "google.cloud.compute_v1.types.PatchRegionHealthSourceRequest" }, { "name": "project", @@ -46241,9 +50373,13 @@ "type": "str" }, { - "name": "health_aggregation_policy", + "name": "health_source", "type": "str" }, + { + "name": "health_source_resource", + "type": "google.cloud.compute_v1.types.HealthSource" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -46258,13 +50394,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "patch" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_health_aggregation_policies_delete_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_health_sources_patch_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_Delete_sync", + "regionTag": "compute_v1_generated_RegionHealthSources_Patch_sync", "segments": [ { "end": 53, @@ -46297,28 +50433,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_aggregation_policies_delete_sync.py" + "title": "compute_v1_generated_region_health_sources_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", - "shortName": "RegionHealthAggregationPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", + "shortName": "RegionHealthSourcesClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.get", + "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.Get", + "fullName": "google.cloud.compute.v1.RegionHealthSources.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", - "shortName": "RegionHealthAggregationPolicies" + "fullName": "google.cloud.compute.v1.RegionHealthSources", + "shortName": "RegionHealthSources" }, - "shortName": "Get" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionHealthAggregationPolicyRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionHealthSourceRequest" }, { "name": "project", @@ -46329,9 +50465,13 @@ "type": "str" }, { - "name": "health_aggregation_policy", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -46345,14 +50485,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.types.HealthAggregationPolicy", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_health_aggregation_policies_get_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_health_sources_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_Get_sync", + "regionTag": "compute_v1_generated_RegionHealthSources_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -46385,28 +50525,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_aggregation_policies_get_sync.py" + "title": "compute_v1_generated_region_health_sources_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", - "shortName": "RegionHealthAggregationPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient", + "shortName": "RegionInstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.insert", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.cancel", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.Insert", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests.Cancel", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", - "shortName": "RegionHealthAggregationPolicies" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "shortName": "RegionInstanceGroupManagerResizeRequests" }, - "shortName": "Insert" + "shortName": "Cancel" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.InsertRegionHealthAggregationPolicyRequest" + "type": "google.cloud.compute_v1.types.CancelRegionInstanceGroupManagerResizeRequestRequest" }, { "name": "project", @@ -46417,8 +50557,12 @@ "type": "str" }, { - "name": "health_aggregation_policy_resource", - "type": "google.cloud.compute_v1.types.HealthAggregationPolicy" + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "resize_request", + "type": "str" }, { "name": "retry", @@ -46434,21 +50578,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "cancel" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_health_aggregation_policies_insert_sync.py", + "description": "Sample for Cancel", + "file": "compute_v1_generated_region_instance_group_manager_resize_requests_cancel_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_Insert_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Cancel_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -46458,43 +50602,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_aggregation_policies_insert_sync.py" + "title": "compute_v1_generated_region_instance_group_manager_resize_requests_cancel_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", - "shortName": "RegionHealthAggregationPoliciesClient" + "fullName": 
"google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient", + "shortName": "RegionInstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.list", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.List", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", - "shortName": "RegionHealthAggregationPolicies" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "shortName": "RegionInstanceGroupManagerResizeRequests" }, - "shortName": "List" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionHealthAggregationPoliciesRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerResizeRequestRequest" }, { "name": "project", @@ -46504,6 +50648,14 @@ "name": "region", "type": "str" }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "resize_request", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -46517,22 +50669,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_health_aggregation_policies.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_health_aggregation_policies_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_instance_group_manager_resize_requests_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_List_sync", + 
"regionTag": "compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Delete_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -46542,43 +50694,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_aggregation_policies_list_sync.py" + "title": "compute_v1_generated_region_instance_group_manager_resize_requests_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", - "shortName": "RegionHealthAggregationPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient", + "shortName": "RegionInstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.patch", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.Patch", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", - "shortName": "RegionHealthAggregationPolicies" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "shortName": "RegionInstanceGroupManagerResizeRequests" }, - "shortName": "Patch" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionHealthAggregationPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRegionInstanceGroupManagerResizeRequestRequest" }, { "name": "project", @@ -46589,12 +50741,12 
@@ "type": "str" }, { - "name": "health_aggregation_policy", + "name": "instance_group_manager", "type": "str" }, { - "name": "health_aggregation_policy_resource", - "type": "google.cloud.compute_v1.types.HealthAggregationPolicy" + "name": "resize_request", + "type": "str" }, { "name": "retry", @@ -46609,22 +50761,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest", + "shortName": "get" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_health_aggregation_policies_patch_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_instance_group_manager_resize_requests_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_Patch_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Get_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -46634,43 +50786,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_aggregation_policies_patch_sync.py" + "title": "compute_v1_generated_region_instance_group_manager_resize_requests_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient", - "shortName": "RegionHealthAggregationPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient", + "shortName": 
"RegionInstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthAggregationPoliciesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthAggregationPolicies", - "shortName": "RegionHealthAggregationPolicies" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "shortName": "RegionInstanceGroupManagerResizeRequests" }, - "shortName": "TestIamPermissions" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionHealthAggregationPolicyRequest" + "type": "google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerResizeRequestRequest" }, { "name": "project", @@ -46681,12 +50833,12 @@ "type": "str" }, { - "name": "resource", + "name": "instance_group_manager", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "instance_group_manager_resize_request_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManagerResizeRequest" }, { "name": "retry", @@ -46701,14 +50853,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_health_aggregation_policies_test_iam_permissions_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_instance_group_manager_resize_requests_insert_sync.py", 
"language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthAggregationPolicies_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagerResizeRequests_Insert_sync", "segments": [ { "end": 53, @@ -46741,33 +50893,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_aggregation_policies_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_instance_group_manager_resize_requests_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", - "shortName": "RegionHealthCheckServicesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient", + "shortName": "RegionInstanceGroupManagerResizeRequestsClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagerResizeRequestsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.AggregatedList", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests.List", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", - "shortName": "RegionHealthCheckServices" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagerResizeRequests", + "shortName": "RegionInstanceGroupManagerResizeRequests" }, - "shortName": "AggregatedList" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListRegionHealthCheckServicesRequest" + "type": "google.cloud.compute_v1.types.ListRegionInstanceGroupManagerResizeRequestsRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -46781,22 +50941,22 @@ "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_health_check_services.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.services.region_instance_group_manager_resize_requests.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_region_health_check_services_aggregated_list_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_instance_group_manager_resize_requests_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthCheckServices_AggregatedList_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagerResizeRequests_List_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -46806,43 +50966,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 55, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_check_services_aggregated_list_sync.py" + "title": "compute_v1_generated_region_instance_group_manager_resize_requests_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", - "shortName": "RegionHealthCheckServicesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.delete", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.abandon_instances", "method": { - "fullName": 
"google.cloud.compute.v1.RegionHealthCheckServices.Delete", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.AbandonInstances", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", - "shortName": "RegionHealthCheckServices" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Delete" + "shortName": "AbandonInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionHealthCheckServiceRequest" + "type": "google.cloud.compute_v1.types.AbandonInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -46853,9 +51013,13 @@ "type": "str" }, { - "name": "health_check_service", + "name": "instance_group_manager", "type": "str" }, + { + "name": "region_instance_group_managers_abandon_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersAbandonInstancesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -46870,13 +51034,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "abandon_instances" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_health_check_services_delete_sync.py", + "description": "Sample for AbandonInstances", + "file": "compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthCheckServices_Delete_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_AbandonInstances_sync", "segments": [ { "end": 53, @@ -46909,28 +51073,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_check_services_delete_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - 
"fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", - "shortName": "RegionHealthCheckServicesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.get", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.apply_updates_to_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Get", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ApplyUpdatesToInstances", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", - "shortName": "RegionHealthCheckServices" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Get" + "shortName": "ApplyUpdatesToInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionHealthCheckServiceRequest" + "type": "google.cloud.compute_v1.types.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -46941,9 +51105,13 @@ "type": "str" }, { - "name": "health_check_service", + "name": "instance_group_manager", "type": "str" }, + { + "name": "region_instance_group_managers_apply_updates_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersApplyUpdatesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -46957,14 +51125,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.HealthCheckService", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "apply_updates_to_instances" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_health_check_services_get_sync.py", + "description": "Sample for ApplyUpdatesToInstances", + "file": 
"compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthCheckServices_Get_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ApplyUpdatesToInstances_sync", "segments": [ { "end": 53, @@ -46997,28 +51165,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_check_services_get_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", - "shortName": "RegionHealthCheckServicesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.insert", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.create_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Insert", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.CreateInstances", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", - "shortName": "RegionHealthCheckServices" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Insert" + "shortName": "CreateInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionHealthCheckServiceRequest" + "type": "google.cloud.compute_v1.types.CreateInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -47029,92 +51197,12 @@ "type": "str" }, { - "name": "health_check_service_resource", - "type": "google.cloud.compute_v1.types.HealthCheckService" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - 
"type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" - }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_health_check_services_insert_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthCheckServices_Insert_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_region_health_check_services_insert_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", - "shortName": "RegionHealthCheckServicesClient" - }, - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.list", - "method": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.List", - "service": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", - "shortName": "RegionHealthCheckServices" - }, - "shortName": "List" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionHealthCheckServicesRequest" - }, - { - "name": "project", + "name": "instance_group_manager", "type": "str" }, { - "name": "region", - "type": "str" + "name": "region_instance_group_managers_create_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersCreateInstancesRequest" }, { "name": "retry", @@ -47129,14 +51217,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.services.region_health_check_services.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "create_instances" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_health_check_services_list_sync.py", + "description": "Sample for CreateInstances", + "file": "compute_v1_generated_region_instance_group_managers_create_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthCheckServices_List_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_CreateInstances_sync", "segments": [ { "end": 53, @@ -47154,43 +51242,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_check_services_list_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_create_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", - "shortName": "RegionHealthCheckServicesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.patch", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.Patch", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.DeleteInstances", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", - "shortName": "RegionHealthCheckServices" + "fullName": 
"google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Patch" + "shortName": "DeleteInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionHealthCheckServiceRequest" + "type": "google.cloud.compute_v1.types.DeleteInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -47201,12 +51289,12 @@ "type": "str" }, { - "name": "health_check_service", + "name": "instance_group_manager", "type": "str" }, { - "name": "health_check_service_resource", - "type": "google.cloud.compute_v1.types.HealthCheckService" + "name": "region_instance_group_managers_delete_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersDeleteInstancesRequest" }, { "name": "retry", @@ -47222,13 +51310,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "delete_instances" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_health_check_services_patch_sync.py", + "description": "Sample for DeleteInstances", + "file": "compute_v1_generated_region_instance_group_managers_delete_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthCheckServices_Patch_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_DeleteInstances_sync", "segments": [ { "end": 53, @@ -47261,28 +51349,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_check_services_patch_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_delete_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient", - "shortName": "RegionHealthCheckServicesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": 
"RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthCheckServicesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete_per_instance_configs", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.DeletePerInstanceConfigs", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthCheckServices", - "shortName": "RegionHealthCheckServices" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "TestIamPermissions" + "shortName": "DeletePerInstanceConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionHealthCheckServiceRequest" + "type": "google.cloud.compute_v1.types.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -47293,12 +51381,12 @@ "type": "str" }, { - "name": "resource", + "name": "instance_group_manager", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "region_instance_group_manager_delete_instance_config_req_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerDeleteInstanceConfigReq" }, { "name": "retry", @@ -47313,14 +51401,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete_per_instance_configs" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_health_check_services_test_iam_permissions_sync.py", + "description": "Sample for DeletePerInstanceConfigs", + "file": 
"compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthCheckServices_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_DeletePerInstanceConfigs_sync", "segments": [ { "end": 53, @@ -47353,28 +51441,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_check_services_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", - "shortName": "RegionHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.delete", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks.Delete", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks", - "shortName": "RegionHealthChecks" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionHealthCheckRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -47385,7 +51473,7 @@ "type": "str" }, { - "name": "health_check", + "name": "instance_group_manager", "type": "str" }, { @@ -47405,10 +51493,10 @@ "shortName": "delete" }, "description": "Sample for Delete", - "file": "compute_v1_generated_region_health_checks_delete_sync.py", + "file": 
"compute_v1_generated_region_instance_group_managers_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthChecks_Delete_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Delete_sync", "segments": [ { "end": 53, @@ -47441,28 +51529,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_checks_delete_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", - "shortName": "RegionHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.get", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks.Get", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks", - "shortName": "RegionHealthChecks" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionHealthCheckRequest" + "type": "google.cloud.compute_v1.types.GetRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -47473,7 +51561,7 @@ "type": "str" }, { - "name": "health_check", + "name": "instance_group_manager", "type": "str" }, { @@ -47489,14 +51577,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.HealthCheck", + "resultType": "google.cloud.compute_v1.types.InstanceGroupManager", "shortName": "get" }, "description": "Sample for Get", - "file": 
"compute_v1_generated_region_health_checks_get_sync.py", + "file": "compute_v1_generated_region_instance_group_managers_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthChecks_Get_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Get_sync", "segments": [ { "end": 53, @@ -47529,28 +51617,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_checks_get_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", - "shortName": "RegionHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.insert", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks.Insert", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks", - "shortName": "RegionHealthChecks" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionHealthCheckRequest" + "type": "google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -47561,8 +51649,8 @@ "type": "str" }, { - "name": "health_check_resource", - "type": "google.cloud.compute_v1.types.HealthCheck" + "name": "instance_group_manager_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManager" }, { "name": "retry", @@ -47581,10 +51669,10 @@ "shortName": "insert" }, "description": "Sample for Insert", - "file": 
"compute_v1_generated_region_health_checks_insert_sync.py", + "file": "compute_v1_generated_region_instance_group_managers_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthChecks_Insert_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Insert_sync", "segments": [ { "end": 52, @@ -47617,28 +51705,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_checks_insert_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", - "shortName": "RegionHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.list", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_errors", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks.List", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListErrors", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks", - "shortName": "RegionHealthChecks" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "List" + "shortName": "ListErrors" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionHealthChecksRequest" + "type": "google.cloud.compute_v1.types.ListErrorsRegionInstanceGroupManagersRequest" }, { "name": "project", @@ -47648,6 +51736,10 @@ "name": "region", "type": "str" }, + { + "name": "instance_group_manager", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -47661,22 +51753,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.services.region_health_checks.pagers.ListPager", - "shortName": "list" + "resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListErrorsPager", + "shortName": "list_errors" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_health_checks_list_sync.py", + "description": "Sample for ListErrors", + "file": "compute_v1_generated_region_instance_group_managers_list_errors_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthChecks_List_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ListErrors_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -47686,43 +51778,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 50, + "end": 55, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_checks_list_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_list_errors_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", - "shortName": "RegionHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.patch", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_managed_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks.Patch", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListManagedInstances", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks", - "shortName": 
"RegionHealthChecks" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Patch" + "shortName": "ListManagedInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionHealthCheckRequest" + "type": "google.cloud.compute_v1.types.ListManagedInstancesRegionInstanceGroupManagersRequest" }, { "name": "project", @@ -47733,13 +51825,9 @@ "type": "str" }, { - "name": "health_check", + "name": "instance_group_manager", "type": "str" }, - { - "name": "health_check_resource", - "type": "google.cloud.compute_v1.types.HealthCheck" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -47753,22 +51841,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListManagedInstancesPager", + "shortName": "list_managed_instances" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_health_checks_patch_sync.py", + "description": "Sample for ListManagedInstances", + "file": "compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthChecks_Patch_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ListManagedInstances_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -47788,33 +51876,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_checks_patch_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - 
"fullName": "google.cloud.compute_v1.RegionHealthChecksClient", - "shortName": "RegionHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_per_instance_configs", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListPerInstanceConfigs", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks", - "shortName": "RegionHealthChecks" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "TestIamPermissions" + "shortName": "ListPerInstanceConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionHealthCheckRequest" + "type": "google.cloud.compute_v1.types.ListPerInstanceConfigsRegionInstanceGroupManagersRequest" }, { "name": "project", @@ -47825,13 +51913,9 @@ "type": "str" }, { - "name": "resource", + "name": "instance_group_manager", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -47845,22 +51929,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListPerInstanceConfigsPager", + "shortName": "list_per_instance_configs" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_health_checks_test_iam_permissions_sync.py", + "description": "Sample for ListPerInstanceConfigs", + 
"file": "compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthChecks_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ListPerInstanceConfigs_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -47880,33 +51964,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_checks_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient", - "shortName": "RegionHealthChecksClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthChecksClient.update", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks.Update", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.List", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthChecks", - "shortName": "RegionHealthChecks" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Update" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdateRegionHealthCheckRequest" + "type": "google.cloud.compute_v1.types.ListRegionInstanceGroupManagersRequest" }, { "name": "project", @@ -47916,14 +52000,6 @@ "name": "region", "type": "str" }, - { - "name": "health_check", - "type": "str" - }, - { - "name": 
"health_check_resource", - "type": "google.cloud.compute_v1.types.HealthCheck" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -47937,14 +52013,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update" + "resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Update", - "file": "compute_v1_generated_region_health_checks_update_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_instance_group_managers_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthChecks_Update_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_List_sync", "segments": [ { "end": 53, @@ -47962,48 +52038,60 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_checks_update_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", - "shortName": "RegionHealthSourcesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.aggregated_list", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.patch_per_instance_configs", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthSources.AggregatedList", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.PatchPerInstanceConfigs", 
"service": { - "fullName": "google.cloud.compute.v1.RegionHealthSources", - "shortName": "RegionHealthSources" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "AggregatedList" + "shortName": "PatchPerInstanceConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListRegionHealthSourcesRequest" + "type": "google.cloud.compute_v1.types.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_manager_patch_instance_config_req_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerPatchInstanceConfigReq" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -48017,22 +52105,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_health_sources.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_per_instance_configs" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_region_health_sources_aggregated_list_sync.py", + "description": "Sample for PatchPerInstanceConfigs", + "file": "compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthSources_AggregatedList_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_PatchPerInstanceConfigs_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -48042,43 +52130,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": 
"REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_sources_aggregated_list_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", - "shortName": "RegionHealthSourcesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.delete", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthSources.Delete", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Patch", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthSources", - "shortName": "RegionHealthSources" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Delete" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionHealthSourceRequest" + "type": "google.cloud.compute_v1.types.PatchRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48089,9 +52177,13 @@ "type": "str" }, { - "name": "health_source", + "name": "instance_group_manager", "type": "str" }, + { + "name": "instance_group_manager_resource", + "type": "google.cloud.compute_v1.types.InstanceGroupManager" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -48106,13 +52198,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "patch" }, - "description": "Sample for Delete", - "file": 
"compute_v1_generated_region_health_sources_delete_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_instance_group_managers_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthSources_Delete_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Patch_sync", "segments": [ { "end": 53, @@ -48145,28 +52237,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_sources_delete_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", - "shortName": "RegionHealthSourcesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.get", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.recreate_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthSources.Get", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.RecreateInstances", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthSources", - "shortName": "RegionHealthSources" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Get" + "shortName": "RecreateInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionHealthSourceRequest" + "type": "google.cloud.compute_v1.types.RecreateInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48177,9 +52269,13 @@ "type": "str" }, { - "name": "health_source", + "name": "instance_group_manager", "type": "str" }, + { + "name": "region_instance_group_managers_recreate_request_resource", + "type": 
"google.cloud.compute_v1.types.RegionInstanceGroupManagersRecreateRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -48193,14 +52289,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.HealthSource", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "recreate_instances" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_health_sources_get_sync.py", + "description": "Sample for RecreateInstances", + "file": "compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthSources_Get_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_RecreateInstances_sync", "segments": [ { "end": 53, @@ -48233,28 +52329,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_sources_get_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", - "shortName": "RegionHealthSourcesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.insert", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.resize", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthSources.Insert", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Resize", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthSources", - "shortName": "RegionHealthSources" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Insert" + "shortName": "Resize" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionHealthSourceRequest" + "type": "google.cloud.compute_v1.types.ResizeRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48265,8 +52361,12 @@ "type": "str" }, { - "name": "health_source_resource", - "type": "google.cloud.compute_v1.types.HealthSource" + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "size", + "type": "int" }, { "name": "retry", @@ -48282,21 +52382,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "resize" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_health_sources_insert_sync.py", + "description": "Sample for Resize", + "file": "compute_v1_generated_region_instance_group_managers_resize_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthSources_Insert_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Resize_sync", "segments": [ { - "end": 52, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 54, "start": 27, "type": "SHORT" }, @@ -48306,43 +52406,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 48, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 51, + "start": 49, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 55, + "start": 52, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_sources_insert_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_resize_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", - "shortName": "RegionHealthSourcesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": 
"google.cloud.compute_v1.RegionHealthSourcesClient.list", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.resume_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthSources.List", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ResumeInstances", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthSources", - "shortName": "RegionHealthSources" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "List" + "shortName": "ResumeInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionHealthSourcesRequest" + "type": "google.cloud.compute_v1.types.ResumeInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48352,6 +52452,14 @@ "name": "region", "type": "str" }, + { + "name": "instance_group_manager", + "type": "str" + }, + { + "name": "region_instance_group_managers_resume_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersResumeInstancesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -48365,14 +52473,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_health_sources.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "resume_instances" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_health_sources_list_sync.py", + "description": "Sample for ResumeInstances", + "file": "compute_v1_generated_region_instance_group_managers_resume_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthSources_List_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ResumeInstances_sync", "segments": [ { "end": 53, @@ -48390,43 +52498,43 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_sources_list_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_resume_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", - "shortName": "RegionHealthSourcesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.patch", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.set_instance_template", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthSources.Patch", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.SetInstanceTemplate", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthSources", - "shortName": "RegionHealthSources" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "Patch" + "shortName": "SetInstanceTemplate" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionHealthSourceRequest" + "type": "google.cloud.compute_v1.types.SetInstanceTemplateRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48437,12 +52545,12 @@ "type": "str" }, { - "name": "health_source", + "name": "instance_group_manager", "type": "str" }, { - "name": "health_source_resource", - "type": "google.cloud.compute_v1.types.HealthSource" + "name": "region_instance_group_managers_set_template_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTemplateRequest" }, { "name": "retry", @@ -48458,13 
+52566,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "set_instance_template" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_health_sources_patch_sync.py", + "description": "Sample for SetInstanceTemplate", + "file": "compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthSources_Patch_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_SetInstanceTemplate_sync", "segments": [ { "end": 53, @@ -48497,28 +52605,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_sources_patch_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient", - "shortName": "RegionHealthSourcesClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", + "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionHealthSourcesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.set_target_pools", "method": { - "fullName": "google.cloud.compute.v1.RegionHealthSources.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.SetTargetPools", "service": { - "fullName": "google.cloud.compute.v1.RegionHealthSources", - "shortName": "RegionHealthSources" + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", + "shortName": "RegionInstanceGroupManagers" }, - "shortName": "TestIamPermissions" + "shortName": "SetTargetPools" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionHealthSourceRequest" + "type": 
"google.cloud.compute_v1.types.SetTargetPoolsRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48529,12 +52637,12 @@ "type": "str" }, { - "name": "resource", + "name": "instance_group_manager", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "region_instance_group_managers_set_target_pools_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTargetPoolsRequest" }, { "name": "retry", @@ -48549,14 +52657,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_target_pools" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_health_sources_test_iam_permissions_sync.py", + "description": "Sample for SetTargetPools", + "file": "compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionHealthSources_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_SetTargetPools_sync", "segments": [ { "end": 53, @@ -48589,7 +52697,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_health_sources_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py" }, { "canonical": true, @@ -48598,19 +52706,19 @@ "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.abandon_instances", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.start_instances", "method": { - "fullName": 
"google.cloud.compute.v1.RegionInstanceGroupManagers.AbandonInstances", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.StartInstances", "service": { "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", "shortName": "RegionInstanceGroupManagers" }, - "shortName": "AbandonInstances" + "shortName": "StartInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AbandonInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.StartInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48625,8 +52733,8 @@ "type": "str" }, { - "name": "region_instance_group_managers_abandon_instances_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersAbandonInstancesRequest" + "name": "region_instance_group_managers_start_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersStartInstancesRequest" }, { "name": "retry", @@ -48642,13 +52750,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "abandon_instances" + "shortName": "start_instances" }, - "description": "Sample for AbandonInstances", - "file": "compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py", + "description": "Sample for StartInstances", + "file": "compute_v1_generated_region_instance_group_managers_start_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_AbandonInstances_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_StartInstances_sync", "segments": [ { "end": 53, @@ -48681,7 +52789,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_abandon_instances_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_start_instances_sync.py" }, { "canonical": true, @@ -48690,19 +52798,19 @@ "fullName": 
"google.cloud.compute_v1.RegionInstanceGroupManagersClient", "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.apply_updates_to_instances", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.stop_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ApplyUpdatesToInstances", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.StopInstances", "service": { "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", "shortName": "RegionInstanceGroupManagers" }, - "shortName": "ApplyUpdatesToInstances" + "shortName": "StopInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ApplyUpdatesToInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.StopInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48717,8 +52825,8 @@ "type": "str" }, { - "name": "region_instance_group_managers_apply_updates_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersApplyUpdatesRequest" + "name": "region_instance_group_managers_stop_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersStopInstancesRequest" }, { "name": "retry", @@ -48734,13 +52842,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "apply_updates_to_instances" + "shortName": "stop_instances" }, - "description": "Sample for ApplyUpdatesToInstances", - "file": "compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py", + "description": "Sample for StopInstances", + "file": "compute_v1_generated_region_instance_group_managers_stop_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ApplyUpdatesToInstances_sync", + "regionTag": 
"compute_v1_generated_RegionInstanceGroupManagers_StopInstances_sync", "segments": [ { "end": 53, @@ -48773,7 +52881,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_apply_updates_to_instances_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_stop_instances_sync.py" }, { "canonical": true, @@ -48782,19 +52890,19 @@ "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.create_instances", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.suspend_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.CreateInstances", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.SuspendInstances", "service": { "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", "shortName": "RegionInstanceGroupManagers" }, - "shortName": "CreateInstances" + "shortName": "SuspendInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.CreateInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.SuspendInstancesRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48809,8 +52917,8 @@ "type": "str" }, { - "name": "region_instance_group_managers_create_instances_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersCreateInstancesRequest" + "name": "region_instance_group_managers_suspend_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersSuspendInstancesRequest" }, { "name": "retry", @@ -48826,13 +52934,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "create_instances" + "shortName": "suspend_instances" }, - "description": "Sample for CreateInstances", - "file": 
"compute_v1_generated_region_instance_group_managers_create_instances_sync.py", + "description": "Sample for SuspendInstances", + "file": "compute_v1_generated_region_instance_group_managers_suspend_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_CreateInstances_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_SuspendInstances_sync", "segments": [ { "end": 53, @@ -48865,7 +52973,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_create_instances_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_suspend_instances_sync.py" }, { "canonical": true, @@ -48874,19 +52982,19 @@ "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", "shortName": "RegionInstanceGroupManagersClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete_instances", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.update_per_instance_configs", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.DeleteInstances", + "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.UpdatePerInstanceConfigs", "service": { "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", "shortName": "RegionInstanceGroupManagers" }, - "shortName": "DeleteInstances" + "shortName": "UpdatePerInstanceConfigs" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest" }, { "name": "project", @@ -48901,8 +53009,8 @@ "type": "str" }, { - "name": "region_instance_group_managers_delete_instances_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersDeleteInstancesRequest" + "name": 
"region_instance_group_manager_update_instance_config_req_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerUpdateInstanceConfigReq" }, { "name": "retry", @@ -48918,13 +53026,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete_instances" + "shortName": "update_per_instance_configs" }, - "description": "Sample for DeleteInstances", - "file": "compute_v1_generated_region_instance_group_managers_delete_instances_sync.py", + "description": "Sample for UpdatePerInstanceConfigs", + "file": "compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_DeleteInstances_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_UpdatePerInstanceConfigs_sync", "segments": [ { "end": 53, @@ -48957,28 +53065,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_delete_instances_sync.py" + "title": "compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete_per_instance_configs", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.DeletePerInstanceConfigs", + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": 
"google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" }, - "shortName": "DeletePerInstanceConfigs" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeletePerInstanceConfigsRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.GetRegionInstanceGroupRequest" }, { "name": "project", @@ -48989,13 +53097,9 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "instance_group", "type": "str" }, - { - "name": "region_instance_group_manager_delete_instance_config_req_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerDeleteInstanceConfigReq" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -49009,14 +53113,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete_per_instance_configs" + "resultType": "google.cloud.compute_v1.types.InstanceGroup", + "shortName": "get" }, - "description": "Sample for DeletePerInstanceConfigs", - "file": "compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_instance_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_DeletePerInstanceConfigs_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroups_Get_sync", "segments": [ { "end": 53, @@ -49049,28 +53153,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_delete_per_instance_configs_sync.py" + "title": "compute_v1_generated_region_instance_groups_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": 
"google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.delete", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.list_instances", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Delete", + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.ListInstances", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" }, - "shortName": "Delete" + "shortName": "ListInstances" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListInstancesRegionInstanceGroupsRequest" }, { "name": "project", @@ -49081,9 +53185,13 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "instance_group", "type": "str" }, + { + "name": "region_instance_groups_list_instances_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -49097,22 +53205,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.services.region_instance_groups.pagers.ListInstancesPager", + "shortName": "list_instances" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_instance_group_managers_delete_sync.py", + "description": "Sample for ListInstances", + "file": "compute_v1_generated_region_instance_groups_list_instances_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Delete_sync", + "regionTag": 
"compute_v1_generated_RegionInstanceGroups_ListInstances_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -49132,33 +53240,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_delete_sync.py" + "title": "compute_v1_generated_region_instance_groups_list_instances_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.get", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Get", + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.List", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" }, - "shortName": "Get" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListRegionInstanceGroupsRequest" }, { "name": "project", @@ -49168,10 +53276,6 @@ "name": "region", "type": "str" }, - { - "name": "instance_group_manager", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -49185,14 +53289,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InstanceGroupManager", - "shortName": "get" + "resultType": 
"google.cloud.compute_v1.services.region_instance_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_instance_group_managers_get_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_instance_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Get_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroups_List_sync", "segments": [ { "end": 53, @@ -49210,43 +53314,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_get_sync.py" + "title": "compute_v1_generated_region_instance_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.insert", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.set_named_ports", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Insert", + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.SetNamedPorts", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" }, - "shortName": "Insert" + "shortName": "SetNamedPorts" }, "parameters": [ { "name": "request", - "type": 
"google.cloud.compute_v1.types.InsertRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.SetNamedPortsRegionInstanceGroupRequest" }, { "name": "project", @@ -49257,8 +53361,12 @@ "type": "str" }, { - "name": "instance_group_manager_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManager" + "name": "instance_group", + "type": "str" + }, + { + "name": "region_instance_groups_set_named_ports_request_resource", + "type": "google.cloud.compute_v1.types.RegionInstanceGroupsSetNamedPortsRequest" }, { "name": "retry", @@ -49274,21 +53382,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "set_named_ports" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_instance_group_managers_insert_sync.py", + "description": "Sample for SetNamedPorts", + "file": "compute_v1_generated_region_instance_groups_set_named_ports_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Insert_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroups_SetNamedPorts_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -49298,43 +53406,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_insert_sync.py" + "title": "compute_v1_generated_region_instance_groups_set_named_ports_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": 
"google.cloud.compute_v1.RegionInstanceGroupsClient", + "shortName": "RegionInstanceGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_errors", + "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListErrors", + "fullName": "google.cloud.compute.v1.RegionInstanceGroups.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstanceGroups", + "shortName": "RegionInstanceGroups" }, - "shortName": "ListErrors" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListErrorsRegionInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionInstanceGroupRequest" }, { "name": "project", @@ -49345,9 +53453,13 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -49361,22 +53473,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListErrorsPager", - "shortName": "list_errors" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for ListErrors", - "file": "compute_v1_generated_region_instance_group_managers_list_errors_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_instance_groups_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_RegionInstanceGroupManagers_ListErrors_sync", + "regionTag": "compute_v1_generated_RegionInstanceGroups_TestIamPermissions_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -49396,33 +53508,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 54, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_list_errors_sync.py" + "title": "compute_v1_generated_region_instance_groups_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", + "shortName": "RegionInstanceTemplatesClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_managed_instances", + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListManagedInstances", + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", + "shortName": "RegionInstanceTemplates" }, - "shortName": "ListManagedInstances" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListManagedInstancesRegionInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionInstanceTemplateRequest" }, { "name": "project", @@ -49433,7 +53545,7 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "instance_template", "type": "str" }, { @@ -49449,22 +53561,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - 
"resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListManagedInstancesPager", - "shortName": "list_managed_instances" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for ListManagedInstances", - "file": "compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_instance_templates_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ListManagedInstances_sync", + "regionTag": "compute_v1_generated_RegionInstanceTemplates_Delete_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -49484,33 +53596,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 54, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_list_managed_instances_sync.py" + "title": "compute_v1_generated_region_instance_templates_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", + "shortName": "RegionInstanceTemplatesClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list_per_instance_configs", + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ListPerInstanceConfigs", + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": 
"google.cloud.compute.v1.RegionInstanceTemplates", + "shortName": "RegionInstanceTemplates" }, - "shortName": "ListPerInstanceConfigs" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListPerInstanceConfigsRegionInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.GetRegionInstanceTemplateRequest" }, { "name": "project", @@ -49521,7 +53633,7 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "instance_template", "type": "str" }, { @@ -49537,22 +53649,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListPerInstanceConfigsPager", - "shortName": "list_per_instance_configs" + "resultType": "google.cloud.compute_v1.types.InstanceTemplate", + "shortName": "get" }, - "description": "Sample for ListPerInstanceConfigs", - "file": "compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_instance_templates_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ListPerInstanceConfigs_sync", + "regionTag": "compute_v1_generated_RegionInstanceTemplates_Get_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -49572,33 +53684,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 54, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_list_per_instance_configs_sync.py" + "title": "compute_v1_generated_region_instance_templates_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": 
"google.cloud.compute_v1.RegionInstanceTemplatesClient", + "shortName": "RegionInstanceTemplatesClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.list", + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.List", + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", + "shortName": "RegionInstanceTemplates" }, - "shortName": "List" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionInstanceGroupManagersRequest" + "type": "google.cloud.compute_v1.types.InsertRegionInstanceTemplateRequest" }, { "name": "project", @@ -49608,6 +53720,10 @@ "name": "region", "type": "str" }, + { + "name": "instance_template_resource", + "type": "google.cloud.compute_v1.types.InstanceTemplate" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -49621,22 +53737,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_instance_group_managers.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_instance_group_managers_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_instance_templates_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_List_sync", + "regionTag": "compute_v1_generated_RegionInstanceTemplates_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, 
"type": "SHORT" }, @@ -49656,33 +53772,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_list_sync.py" + "title": "compute_v1_generated_region_instance_templates_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", + "shortName": "RegionInstanceTemplatesClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.patch_per_instance_configs", + "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.PatchPerInstanceConfigs", + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.List", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", + "shortName": "RegionInstanceTemplates" }, - "shortName": "PatchPerInstanceConfigs" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchPerInstanceConfigsRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListRegionInstanceTemplatesRequest" }, { "name": "project", @@ -49692,14 +53808,6 @@ "name": "region", "type": "str" }, - { - "name": "instance_group_manager", - "type": "str" - }, - { - "name": "region_instance_group_manager_patch_instance_config_req_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerPatchInstanceConfigReq" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -49713,14 +53821,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch_per_instance_configs" + "resultType": "google.cloud.compute_v1.services.region_instance_templates.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for PatchPerInstanceConfigs", - "file": "compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_instance_templates_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_PatchPerInstanceConfigs_sync", + "regionTag": "compute_v1_generated_RegionInstanceTemplates_List_sync", "segments": [ { "end": 53, @@ -49738,43 +53846,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_patch_per_instance_configs_sync.py" + "title": "compute_v1_generated_region_instance_templates_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstancesClient", + "shortName": "RegionInstancesClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.patch", + "fullName": "google.cloud.compute_v1.RegionInstancesClient.bulk_insert", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Patch", + "fullName": "google.cloud.compute.v1.RegionInstances.BulkInsert", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstances", + 
"shortName": "RegionInstances" }, - "shortName": "Patch" + "shortName": "BulkInsert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.BulkInsertRegionInstanceRequest" }, { "name": "project", @@ -49785,12 +53893,8 @@ "type": "str" }, { - "name": "instance_group_manager", - "type": "str" - }, - { - "name": "instance_group_manager_resource", - "type": "google.cloud.compute_v1.types.InstanceGroupManager" + "name": "bulk_insert_instance_resource_resource", + "type": "google.cloud.compute_v1.types.BulkInsertInstanceResource" }, { "name": "retry", @@ -49806,21 +53910,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "bulk_insert" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_instance_group_managers_patch_sync.py", + "description": "Sample for BulkInsert", + "file": "compute_v1_generated_region_instances_bulk_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Patch_sync", + "regionTag": "compute_v1_generated_RegionInstances_BulkInsert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -49830,43 +53934,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_patch_sync.py" + "title": "compute_v1_generated_region_instances_bulk_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": 
"RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient", + "shortName": "RegionInstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.recreate_instances", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.RecreateInstances", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "shortName": "RegionInstantSnapshotGroups" }, - "shortName": "RecreateInstances" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RecreateInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionInstantSnapshotGroupRequest" }, { "name": "project", @@ -49877,13 +53981,9 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "instant_snapshot_group", "type": "str" }, - { - "name": "region_instance_group_managers_recreate_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersRecreateRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -49898,13 +53998,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "recreate_instances" + "shortName": "delete" }, - "description": "Sample for RecreateInstances", - "file": "compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_instant_snapshot_groups_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_RecreateInstances_sync", + "regionTag": 
"compute_v1_generated_RegionInstantSnapshotGroups_Delete_sync", "segments": [ { "end": 53, @@ -49937,120 +54037,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_recreate_instances_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" - }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.resize", - "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.Resize", - "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" - }, - "shortName": "Resize" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.ResizeRegionInstanceGroupManagerRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "region", - "type": "str" - }, - { - "name": "instance_group_manager", - "type": "str" - }, - { - "name": "size", - "type": "int" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "resize" - }, - "description": "Sample for Resize", - "file": "compute_v1_generated_region_instance_group_managers_resize_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_Resize_sync", - "segments": [ - { - "end": 54, - "start": 27, - "type": "FULL" - }, - { - "end": 54, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 48, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 51, - "start": 49, - "type": "REQUEST_EXECUTION" - }, - { - 
"end": 55, - "start": 52, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_region_instance_group_managers_resize_sync.py" + "title": "compute_v1_generated_region_instant_snapshot_groups_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient", + "shortName": "RegionInstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.resume_instances", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.ResumeInstances", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups.GetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "shortName": "RegionInstantSnapshotGroups" }, - "shortName": "ResumeInstances" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ResumeInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionInstantSnapshotGroupRequest" }, { "name": "project", @@ -50061,13 +54069,9 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "resource", "type": "str" }, - { - "name": "region_instance_group_managers_resume_instances_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersResumeInstancesRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -50081,14 +54085,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "resume_instances" + 
"resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for ResumeInstances", - "file": "compute_v1_generated_region_instance_group_managers_resume_instances_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_instant_snapshot_groups_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_ResumeInstances_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshotGroups_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -50121,28 +54125,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_resume_instances_sync.py" + "title": "compute_v1_generated_region_instant_snapshot_groups_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient", + "shortName": "RegionInstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.set_instance_template", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.SetInstanceTemplate", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "shortName": "RegionInstantSnapshotGroups" }, - "shortName": "SetInstanceTemplate" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetInstanceTemplateRegionInstanceGroupManagerRequest" + "type": 
"google.cloud.compute_v1.types.GetRegionInstantSnapshotGroupRequest" }, { "name": "project", @@ -50153,13 +54157,9 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "instant_snapshot_group", "type": "str" }, - { - "name": "region_instance_group_managers_set_template_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTemplateRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -50173,14 +54173,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_instance_template" + "resultType": "google.cloud.compute_v1.types.InstantSnapshotGroup", + "shortName": "get" }, - "description": "Sample for SetInstanceTemplate", - "file": "compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_instant_snapshot_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_SetInstanceTemplate_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshotGroups_Get_sync", "segments": [ { "end": 53, @@ -50213,28 +54213,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_set_instance_template_sync.py" + "title": "compute_v1_generated_region_instant_snapshot_groups_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient", + "shortName": "RegionInstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.set_target_pools", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.insert", "method": { - "fullName": 
"google.cloud.compute.v1.RegionInstanceGroupManagers.SetTargetPools", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "shortName": "RegionInstantSnapshotGroups" }, - "shortName": "SetTargetPools" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetTargetPoolsRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.InsertRegionInstantSnapshotGroupRequest" }, { "name": "project", @@ -50245,12 +54245,8 @@ "type": "str" }, { - "name": "instance_group_manager", - "type": "str" - }, - { - "name": "region_instance_group_managers_set_target_pools_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersSetTargetPoolsRequest" + "name": "instant_snapshot_group_resource", + "type": "google.cloud.compute_v1.types.InstantSnapshotGroup" }, { "name": "retry", @@ -50266,21 +54262,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_target_pools" + "shortName": "insert" }, - "description": "Sample for SetTargetPools", - "file": "compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_instant_snapshot_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_SetTargetPools_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshotGroups_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -50290,43 +54286,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 
50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_set_target_pools_sync.py" + "title": "compute_v1_generated_region_instant_snapshot_groups_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient", + "shortName": "RegionInstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.start_instances", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.StartInstances", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups.List", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "shortName": "RegionInstantSnapshotGroups" }, - "shortName": "StartInstances" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.StartInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.ListRegionInstantSnapshotGroupsRequest" }, { "name": "project", @@ -50336,14 +54332,6 @@ "name": "region", "type": "str" }, - { - "name": "instance_group_manager", - "type": "str" - }, - { - "name": "region_instance_group_managers_start_instances_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersStartInstancesRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -50357,14 +54345,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.api_core.extended_operation.ExtendedOperation", - "shortName": "start_instances" + "resultType": "google.cloud.compute_v1.services.region_instant_snapshot_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for StartInstances", - "file": "compute_v1_generated_region_instance_group_managers_start_instances_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_instant_snapshot_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_StartInstances_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshotGroups_List_sync", "segments": [ { "end": 53, @@ -50382,43 +54370,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_start_instances_sync.py" + "title": "compute_v1_generated_region_instant_snapshot_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient", + "shortName": "RegionInstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.stop_instances", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.StopInstances", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": 
"google.cloud.compute.v1.RegionInstantSnapshotGroups", + "shortName": "RegionInstantSnapshotGroups" }, - "shortName": "StopInstances" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.StopInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionInstantSnapshotGroupRequest" }, { "name": "project", @@ -50429,12 +54417,12 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "resource", "type": "str" }, { - "name": "region_instance_group_managers_stop_instances_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersStopInstancesRequest" + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" }, { "name": "retry", @@ -50449,14 +54437,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "stop_instances" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for StopInstances", - "file": "compute_v1_generated_region_instance_group_managers_stop_instances_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_instant_snapshot_groups_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_StopInstances_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshotGroups_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -50489,28 +54477,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_stop_instances_sync.py" + "title": "compute_v1_generated_region_instant_snapshot_groups_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": 
"RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient", + "shortName": "RegionInstantSnapshotGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.suspend_instances", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotGroupsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.SuspendInstances", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshotGroups", + "shortName": "RegionInstantSnapshotGroups" }, - "shortName": "SuspendInstances" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SuspendInstancesRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionInstantSnapshotGroupRequest" }, { "name": "project", @@ -50521,12 +54509,12 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "resource", "type": "str" }, { - "name": "region_instance_group_managers_suspend_instances_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagersSuspendInstancesRequest" + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -50541,14 +54529,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "suspend_instances" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for SuspendInstances", - "file": "compute_v1_generated_region_instance_group_managers_suspend_instances_sync.py", + "description": "Sample 
for TestIamPermissions", + "file": "compute_v1_generated_region_instant_snapshot_groups_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_SuspendInstances_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshotGroups_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -50581,28 +54569,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_suspend_instances_sync.py" + "title": "compute_v1_generated_region_instant_snapshot_groups_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient", - "shortName": "RegionInstanceGroupManagersClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", + "shortName": "RegionInstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupManagersClient.update_per_instance_configs", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers.UpdatePerInstanceConfigs", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroupManagers", - "shortName": "RegionInstanceGroupManagers" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", + "shortName": "RegionInstantSnapshots" }, - "shortName": "UpdatePerInstanceConfigs" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.UpdatePerInstanceConfigsRegionInstanceGroupManagerRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionInstantSnapshotRequest" }, { "name": "project", @@ -50613,13 +54601,9 @@ "type": "str" }, { - "name": "instance_group_manager", + "name": "instant_snapshot", "type": "str" }, - { - "name": 
"region_instance_group_manager_update_instance_config_req_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupManagerUpdateInstanceConfigReq" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -50634,13 +54618,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "update_per_instance_configs" + "shortName": "delete" }, - "description": "Sample for UpdatePerInstanceConfigs", - "file": "compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_instant_snapshots_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroupManagers_UpdatePerInstanceConfigs_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshots_Delete_sync", "segments": [ { "end": 53, @@ -50673,28 +54657,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_group_managers_update_per_instance_configs_sync.py" + "title": "compute_v1_generated_region_instant_snapshots_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", - "shortName": "RegionInstanceGroupsClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", + "shortName": "RegionInstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.get", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups.Get", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.GetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups", - "shortName": "RegionInstanceGroups" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", + "shortName": "RegionInstantSnapshots" }, - "shortName": "Get" + "shortName": 
"GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionInstanceGroupRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionInstantSnapshotRequest" }, { "name": "project", @@ -50705,7 +54689,7 @@ "type": "str" }, { - "name": "instance_group", + "name": "resource", "type": "str" }, { @@ -50721,14 +54705,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InstanceGroup", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_instance_groups_get_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_instant_snapshots_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroups_Get_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshots_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -50761,28 +54745,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_groups_get_sync.py" + "title": "compute_v1_generated_region_instant_snapshots_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", - "shortName": "RegionInstanceGroupsClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", + "shortName": "RegionInstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.list_instances", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups.ListInstances", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups", - "shortName": "RegionInstanceGroups" + "fullName": 
"google.cloud.compute.v1.RegionInstantSnapshots", + "shortName": "RegionInstantSnapshots" }, - "shortName": "ListInstances" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListInstancesRegionInstanceGroupsRequest" + "type": "google.cloud.compute_v1.types.GetRegionInstantSnapshotRequest" }, { "name": "project", @@ -50793,13 +54777,9 @@ "type": "str" }, { - "name": "instance_group", + "name": "instant_snapshot", "type": "str" }, - { - "name": "region_instance_groups_list_instances_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupsListInstancesRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -50813,22 +54793,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_instance_groups.pagers.ListInstancesPager", - "shortName": "list_instances" + "resultType": "google.cloud.compute_v1.types.InstantSnapshot", + "shortName": "get" }, - "description": "Sample for ListInstances", - "file": "compute_v1_generated_region_instance_groups_list_instances_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_instant_snapshots_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroups_ListInstances_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshots_Get_sync", "segments": [ { - "end": 54, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 53, "start": 27, "type": "SHORT" }, @@ -50848,33 +54828,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 55, + "end": 54, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_groups_list_instances_sync.py" + "title": "compute_v1_generated_region_instant_snapshots_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", - "shortName": 
"RegionInstanceGroupsClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", + "shortName": "RegionInstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.list", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups.List", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups", - "shortName": "RegionInstanceGroups" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", + "shortName": "RegionInstantSnapshots" }, - "shortName": "List" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionInstanceGroupsRequest" + "type": "google.cloud.compute_v1.types.InsertRegionInstantSnapshotRequest" }, { "name": "project", @@ -50884,6 +54864,10 @@ "name": "region", "type": "str" }, + { + "name": "instant_snapshot_resource", + "type": "google.cloud.compute_v1.types.InstantSnapshot" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -50897,22 +54881,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_instance_groups.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_instance_groups_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_instant_snapshots_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroups_List_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshots_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ 
-50932,33 +54916,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_groups_list_sync.py" + "title": "compute_v1_generated_region_instant_snapshots_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", - "shortName": "RegionInstanceGroupsClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", + "shortName": "RegionInstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.set_named_ports", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups.SetNamedPorts", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.List", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups", - "shortName": "RegionInstanceGroups" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", + "shortName": "RegionInstantSnapshots" }, - "shortName": "SetNamedPorts" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetNamedPortsRegionInstanceGroupRequest" + "type": "google.cloud.compute_v1.types.ListRegionInstantSnapshotsRequest" }, { "name": "project", @@ -50968,14 +54952,6 @@ "name": "region", "type": "str" }, - { - "name": "instance_group", - "type": "str" - }, - { - "name": "region_instance_groups_set_named_ports_request_resource", - "type": "google.cloud.compute_v1.types.RegionInstanceGroupsSetNamedPortsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -50989,14 +54965,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_named_ports" + "resultType": "google.cloud.compute_v1.services.region_instant_snapshots.pagers.ListPager", + "shortName": 
"list" }, - "description": "Sample for SetNamedPorts", - "file": "compute_v1_generated_region_instance_groups_set_named_ports_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_instant_snapshots_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroups_SetNamedPorts_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshots_List_sync", "segments": [ { "end": 53, @@ -51014,43 +54990,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_groups_set_named_ports_sync.py" + "title": "compute_v1_generated_region_instant_snapshots_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient", - "shortName": "RegionInstanceGroupsClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", + "shortName": "RegionInstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceGroupsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceGroups", - "shortName": "RegionInstanceGroups" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", + "shortName": "RegionInstantSnapshots" }, - "shortName": "TestIamPermissions" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionInstanceGroupRequest" + "type": 
"google.cloud.compute_v1.types.SetIamPolicyRegionInstantSnapshotRequest" }, { "name": "project", @@ -51065,8 +55041,8 @@ "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" }, { "name": "retry", @@ -51081,14 +55057,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_instance_groups_test_iam_permissions_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_instant_snapshots_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceGroups_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshots_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -51121,28 +55097,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_groups_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_instant_snapshots_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", - "shortName": "RegionInstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", + "shortName": "RegionInstantSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.delete", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Delete", + "fullName": 
"google.cloud.compute.v1.RegionInstantSnapshots.SetLabels", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", - "shortName": "RegionInstanceTemplates" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", + "shortName": "RegionInstantSnapshots" }, - "shortName": "Delete" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.SetLabelsRegionInstantSnapshotRequest" }, { "name": "project", @@ -51153,9 +55129,13 @@ "type": "str" }, { - "name": "instance_template", + "name": "resource", "type": "str" }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -51170,13 +55150,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "set_labels" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_instance_templates_delete_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_region_instant_snapshots_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceTemplates_Delete_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshots_SetLabels_sync", "segments": [ { "end": 53, @@ -51209,28 +55189,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_templates_delete_sync.py" + "title": "compute_v1_generated_region_instant_snapshots_set_labels_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", - "shortName": "RegionInstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", + "shortName": "RegionInstantSnapshotsClient" }, - "fullName": 
"google.cloud.compute_v1.RegionInstanceTemplatesClient.get", + "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Get", + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", - "shortName": "RegionInstanceTemplates" + "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", + "shortName": "RegionInstantSnapshots" }, - "shortName": "Get" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionInstantSnapshotRequest" }, { "name": "project", @@ -51241,9 +55221,13 @@ "type": "str" }, { - "name": "instance_template", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -51257,14 +55241,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InstanceTemplate", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_instance_templates_get_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_instant_snapshots_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceTemplates_Get_sync", + "regionTag": "compute_v1_generated_RegionInstantSnapshots_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -51297,28 +55281,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": 
"compute_v1_generated_region_instance_templates_get_sync.py" + "title": "compute_v1_generated_region_instant_snapshots_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", - "shortName": "RegionInstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.insert", + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.attach_network_endpoints", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.Insert", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.AttachNetworkEndpoints", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", - "shortName": "RegionInstanceTemplates" + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" }, - "shortName": "Insert" + "shortName": "AttachNetworkEndpoints" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionInstanceTemplateRequest" + "type": "google.cloud.compute_v1.types.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest" }, { "name": "project", @@ -51329,8 +55313,12 @@ "type": "str" }, { - "name": "instance_template_resource", - "type": "google.cloud.compute_v1.types.InstanceTemplate" + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "region_network_endpoint_groups_attach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.RegionNetworkEndpointGroupsAttachEndpointsRequest" }, { "name": "retry", @@ -51346,21 +55334,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "shortName": "attach_network_endpoints" }, - "description": "Sample for Insert", - "file": 
"compute_v1_generated_region_instance_templates_insert_sync.py", + "description": "Sample for AttachNetworkEndpoints", + "file": "compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceTemplates_Insert_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_AttachNetworkEndpoints_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -51370,43 +55358,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_templates_insert_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient", - "shortName": "RegionInstanceTemplatesClient" + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstanceTemplatesClient.list", + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionInstanceTemplates.List", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionInstanceTemplates", - "shortName": "RegionInstanceTemplates" + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" }, - "shortName": "List" + "shortName": "Delete" }, "parameters": [ { "name": "request", 
- "type": "google.cloud.compute_v1.types.ListRegionInstanceTemplatesRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest" }, { "name": "project", @@ -51416,6 +55404,10 @@ "name": "region", "type": "str" }, + { + "name": "network_endpoint_group", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -51429,14 +55421,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_instance_templates.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_instance_templates_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstanceTemplates_List_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Delete_sync", "segments": [ { "end": 53, @@ -51454,43 +55446,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instance_templates_list_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstancesClient", - "shortName": "RegionInstancesClient" + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstancesClient.bulk_insert", + "fullName": 
"google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.detach_network_endpoints", "method": { - "fullName": "google.cloud.compute.v1.RegionInstances.BulkInsert", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.DetachNetworkEndpoints", "service": { - "fullName": "google.cloud.compute.v1.RegionInstances", - "shortName": "RegionInstances" + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" }, - "shortName": "BulkInsert" + "shortName": "DetachNetworkEndpoints" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.BulkInsertRegionInstanceRequest" + "type": "google.cloud.compute_v1.types.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest" }, { "name": "project", @@ -51501,8 +55493,12 @@ "type": "str" }, { - "name": "bulk_insert_instance_resource_resource", - "type": "google.cloud.compute_v1.types.BulkInsertInstanceResource" + "name": "network_endpoint_group", + "type": "str" + }, + { + "name": "region_network_endpoint_groups_detach_endpoints_request_resource", + "type": "google.cloud.compute_v1.types.RegionNetworkEndpointGroupsDetachEndpointsRequest" }, { "name": "retry", @@ -51518,21 +55514,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "bulk_insert" + "shortName": "detach_network_endpoints" }, - "description": "Sample for BulkInsert", - "file": "compute_v1_generated_region_instances_bulk_insert_sync.py", + "description": "Sample for DetachNetworkEndpoints", + "file": "compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstances_BulkInsert_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_DetachNetworkEndpoints_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ 
-51542,43 +55538,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instances_bulk_insert_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", - "shortName": "RegionInstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.delete", + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.Delete", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", - "shortName": "RegionInstantSnapshots" + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" }, - "shortName": "Delete" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.GetRegionNetworkEndpointGroupRequest" }, { "name": "project", @@ -51589,7 +55585,7 @@ "type": "str" }, { - "name": "instant_snapshot", + "name": "network_endpoint_group", "type": "str" }, { @@ -51605,14 +55601,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.NetworkEndpointGroup", + "shortName": "get" }, - 
"description": "Sample for Delete", - "file": "compute_v1_generated_region_instant_snapshots_delete_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_network_endpoint_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstantSnapshots_Delete_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Get_sync", "segments": [ { "end": 53, @@ -51645,28 +55641,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instant_snapshots_delete_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", - "shortName": "RegionInstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.GetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", - "shortName": "RegionInstantSnapshots" + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" }, - "shortName": "GetIamPolicy" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyRegionInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest" }, { "name": "project", @@ -51677,8 +55673,8 @@ "type": "str" }, { - "name": "resource", - "type": "str" + "name": "network_endpoint_group_resource", + "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" }, 
{ "name": "retry", @@ -51693,22 +55689,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_region_instant_snapshots_get_iam_policy_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstantSnapshots_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -51718,43 +55714,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instant_snapshots_get_iam_policy_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", - "shortName": "RegionInstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.get", + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.list_network_endpoints", "method": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.Get", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.ListNetworkEndpoints", 
"service": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", - "shortName": "RegionInstantSnapshots" + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" }, - "shortName": "Get" + "shortName": "ListNetworkEndpoints" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest" }, { "name": "project", @@ -51765,7 +55761,7 @@ "type": "str" }, { - "name": "instant_snapshot", + "name": "network_endpoint_group", "type": "str" }, { @@ -51781,22 +55777,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.InstantSnapshot", - "shortName": "get" + "resultType": "google.cloud.compute_v1.services.region_network_endpoint_groups.pagers.ListNetworkEndpointsPager", + "shortName": "list_network_endpoints" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_instant_snapshots_get_sync.py", + "description": "Sample for ListNetworkEndpoints", + "file": "compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstantSnapshots_Get_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_ListNetworkEndpoints_sync", "segments": [ { - "end": 53, + "end": 54, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 54, "start": 27, "type": "SHORT" }, @@ -51816,33 +55812,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 55, "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instant_snapshots_get_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.compute_v1.RegionInstantSnapshotsClient", - "shortName": "RegionInstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", + "shortName": "RegionNetworkEndpointGroupsClient" }, - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.insert", + "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.Insert", + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.List", "service": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", - "shortName": "RegionInstantSnapshots" + "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", + "shortName": "RegionNetworkEndpointGroups" }, - "shortName": "Insert" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.ListRegionNetworkEndpointGroupsRequest" }, { "name": "project", @@ -51852,10 +55848,6 @@ "name": "region", "type": "str" }, - { - "name": "instant_snapshot_resource", - "type": "google.cloud.compute_v1.types.InstantSnapshot" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -51869,22 +55861,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.services.region_network_endpoint_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_instant_snapshots_insert_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_network_endpoint_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstantSnapshots_Insert_sync", + "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_List_sync", 
"segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -51904,33 +55896,33 @@ "type": "REQUEST_EXECUTION" }, { - "end": 53, + "end": 54, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instant_snapshots_insert_sync.py" + "title": "compute_v1_generated_region_network_endpoint_groups_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", - "shortName": "RegionInstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.list", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.add_association", "method": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.List", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.AddAssociation", "service": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", - "shortName": "RegionInstantSnapshots" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "List" + "shortName": "AddAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionInstantSnapshotsRequest" + "type": "google.cloud.compute_v1.types.AddAssociationRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -51940,6 +55932,14 @@ "name": "region", "type": "str" }, + { + "name": "firewall_policy", + "type": "str" + }, + { + "name": "firewall_policy_association_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyAssociation" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -51953,14 +55953,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.services.region_instant_snapshots.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_association" }, - "description": "Sample for List", - "file": "compute_v1_generated_region_instant_snapshots_list_sync.py", + "description": "Sample for AddAssociation", + "file": "compute_v1_generated_region_network_firewall_policies_add_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstantSnapshots_List_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_AddAssociation_sync", "segments": [ { "end": 53, @@ -51978,43 +55978,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 50, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instant_snapshots_list_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_add_association_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", - "shortName": "RegionInstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.add_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.SetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.AddRule", "service": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", - "shortName": "RegionInstantSnapshots" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + 
"shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "SetIamPolicy" + "shortName": "AddRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyRegionInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.AddRuleRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52025,12 +56025,12 @@ "type": "str" }, { - "name": "resource", + "name": "firewall_policy", "type": "str" }, { - "name": "region_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" }, { "name": "retry", @@ -52045,14 +56045,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_rule" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_region_instant_snapshots_set_iam_policy_sync.py", + "description": "Sample for AddRule", + "file": "compute_v1_generated_region_network_firewall_policies_add_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstantSnapshots_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_AddRule_sync", "segments": [ { "end": 53, @@ -52085,28 +56085,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instant_snapshots_set_iam_policy_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_add_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", - "shortName": "RegionInstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": 
"google.cloud.compute_v1.RegionInstantSnapshotsClient.set_labels", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.clone_rules", "method": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.SetLabels", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.CloneRules", "service": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", - "shortName": "RegionInstantSnapshots" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "SetLabels" + "shortName": "CloneRules" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsRegionInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.CloneRulesRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52117,13 +56117,9 @@ "type": "str" }, { - "name": "resource", + "name": "firewall_policy", "type": "str" }, - { - "name": "region_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -52138,13 +56134,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "shortName": "clone_rules" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_region_instant_snapshots_set_labels_sync.py", + "description": "Sample for CloneRules", + "file": "compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstantSnapshots_SetLabels_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_CloneRules_sync", "segments": [ { "end": 53, @@ -52177,28 +56173,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instant_snapshots_set_labels_sync.py" + "title": 
"compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient", - "shortName": "RegionInstantSnapshotsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionInstantSnapshotsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionInstantSnapshots", - "shortName": "RegionInstantSnapshots" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "TestIamPermissions" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionInstantSnapshotRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52209,13 +56205,9 @@ "type": "str" }, { - "name": "resource", + "name": "firewall_policy", "type": "str" }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -52229,14 +56221,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_instant_snapshots_test_iam_permissions_sync.py", + "description": 
"Sample for Delete", + "file": "compute_v1_generated_region_network_firewall_policies_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionInstantSnapshots_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Delete_sync", "segments": [ { "end": 53, @@ -52269,28 +56261,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_instant_snapshots_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", - "shortName": "RegionNetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.attach_network_endpoints", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_association", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.AttachNetworkEndpoints", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetAssociation", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", - "shortName": "RegionNetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "AttachNetworkEndpoints" + "shortName": "GetAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AttachNetworkEndpointsRegionNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.GetAssociationRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52301,13 +56293,9 @@ "type": "str" }, { - "name": "network_endpoint_group", + "name": "firewall_policy", "type": "str" }, - { - "name": 
"region_network_endpoint_groups_attach_endpoints_request_resource", - "type": "google.cloud.compute_v1.types.RegionNetworkEndpointGroupsAttachEndpointsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -52321,14 +56309,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "attach_network_endpoints" + "resultType": "google.cloud.compute_v1.types.FirewallPolicyAssociation", + "shortName": "get_association" }, - "description": "Sample for AttachNetworkEndpoints", - "file": "compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py", + "description": "Sample for GetAssociation", + "file": "compute_v1_generated_region_network_firewall_policies_get_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_AttachNetworkEndpoints_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetAssociation_sync", "segments": [ { "end": 53, @@ -52361,28 +56349,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_attach_network_endpoints_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_get_association_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", - "shortName": "RegionNetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.delete", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_effective_firewalls", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Delete", + "fullName": 
"google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetEffectiveFirewalls", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", - "shortName": "RegionNetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "Delete" + "shortName": "GetEffectiveFirewalls" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52393,7 +56381,7 @@ "type": "str" }, { - "name": "network_endpoint_group", + "name": "network", "type": "str" }, { @@ -52409,14 +56397,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse", + "shortName": "get_effective_firewalls" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py", + "description": "Sample for GetEffectiveFirewalls", + "file": "compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Delete_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetEffectiveFirewalls_sync", "segments": [ { "end": 53, @@ -52449,28 +56437,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_delete_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", - "shortName": 
"RegionNetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.detach_network_endpoints", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.DetachNetworkEndpoints", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", - "shortName": "RegionNetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "DetachNetworkEndpoints" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DetachNetworkEndpointsRegionNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52481,13 +56469,9 @@ "type": "str" }, { - "name": "network_endpoint_group", + "name": "resource", "type": "str" }, - { - "name": "region_network_endpoint_groups_detach_endpoints_request_resource", - "type": "google.cloud.compute_v1.types.RegionNetworkEndpointGroupsDetachEndpointsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -52501,14 +56485,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "detach_network_endpoints" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for DetachNetworkEndpoints", - "file": "compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py", + "description": "Sample for GetIamPolicy", + "file": 
"compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_DetachNetworkEndpoints_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -52541,28 +56525,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_detach_network_endpoints_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", - "shortName": "RegionNetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.get", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Get", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetRule", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", - "shortName": "RegionNetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "Get" + "shortName": "GetRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.GetRuleRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52573,7 +56557,7 @@ "type": "str" }, { - "name": "network_endpoint_group", + "name": "firewall_policy", "type": "str" }, { @@ -52589,14 +56573,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.types.NetworkEndpointGroup", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", + "shortName": "get_rule" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_network_endpoint_groups_get_sync.py", + "description": "Sample for GetRule", + "file": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Get_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetRule_sync", "segments": [ { "end": 53, @@ -52629,28 +56613,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_get_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", - "shortName": "RegionNetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.insert", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.Insert", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", - "shortName": "RegionNetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "Insert" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionNetworkEndpointGroupRequest" + "type": "google.cloud.compute_v1.types.GetRegionNetworkFirewallPolicyRequest" }, 
{ "name": "project", @@ -52661,8 +56645,8 @@ "type": "str" }, { - "name": "network_endpoint_group_resource", - "type": "google.cloud.compute_v1.types.NetworkEndpointGroup" + "name": "firewall_policy", + "type": "str" }, { "name": "retry", @@ -52677,22 +56661,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.FirewallPolicy", + "shortName": "get" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_network_firewall_policies_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_Insert_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -52702,43 +56686,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_insert_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", - "shortName": "RegionNetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.list_network_endpoints", + "fullName": 
"google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.ListNetworkEndpoints", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", - "shortName": "RegionNetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "ListNetworkEndpoints" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListNetworkEndpointsRegionNetworkEndpointGroupsRequest" + "type": "google.cloud.compute_v1.types.InsertRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52749,8 +56733,8 @@ "type": "str" }, { - "name": "network_endpoint_group", - "type": "str" + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" }, { "name": "retry", @@ -52765,22 +56749,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_network_endpoint_groups.pagers.ListNetworkEndpointsPager", - "shortName": "list_network_endpoints" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for ListNetworkEndpoints", - "file": "compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_region_network_firewall_policies_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_ListNetworkEndpoints_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Insert_sync", "segments": [ { - "end": 54, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 54, + "end": 52, "start": 27, "type": "SHORT" }, @@ -52790,43 +56774,43 
@@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 55, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_list_network_endpoints_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient", - "shortName": "RegionNetworkEndpointGroupsClient" + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", + "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkEndpointGroupsClient.list", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups.List", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.List", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkEndpointGroups", - "shortName": "RegionNetworkEndpointGroups" + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", + "shortName": "RegionNetworkFirewallPolicies" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionNetworkEndpointGroupsRequest" + "type": "google.cloud.compute_v1.types.ListRegionNetworkFirewallPoliciesRequest" }, { "name": "project", @@ -52849,14 +56833,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_network_endpoint_groups.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.region_network_firewall_policies.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": 
"compute_v1_generated_region_network_endpoint_groups_list_sync.py", + "file": "compute_v1_generated_region_network_firewall_policies_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkEndpointGroups_List_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_List_sync", "segments": [ { "end": 53, @@ -52889,7 +56873,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_endpoint_groups_list_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_list_sync.py" }, { "canonical": true, @@ -52898,19 +56882,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.add_association", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.AddAssociation", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.PatchRule", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "AddAssociation" + "shortName": "PatchRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddAssociationRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.PatchRuleRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -52925,8 +56909,8 @@ "type": "str" }, { - "name": "firewall_policy_association_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicyAssociation" + "name": "firewall_policy_rule_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicyRule" }, { "name": "retry", @@ -52942,13 +56926,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_association" + 
"shortName": "patch_rule" }, - "description": "Sample for AddAssociation", - "file": "compute_v1_generated_region_network_firewall_policies_add_association_sync.py", + "description": "Sample for PatchRule", + "file": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_AddAssociation_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_PatchRule_sync", "segments": [ { "end": 53, @@ -52981,7 +56965,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_add_association_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py" }, { "canonical": true, @@ -52990,19 +56974,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.add_rule", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.AddRule", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Patch", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "AddRule" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddRuleRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.PatchRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -53017,8 +57001,8 @@ "type": "str" }, { - "name": "firewall_policy_rule_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + "name": "firewall_policy_resource", + "type": "google.cloud.compute_v1.types.FirewallPolicy" }, { "name": "retry", @@ -53034,13 +57018,13 @@ } ], 
"resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_rule" + "shortName": "patch" }, - "description": "Sample for AddRule", - "file": "compute_v1_generated_region_network_firewall_policies_add_rule_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_network_firewall_policies_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_AddRule_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Patch_sync", "segments": [ { "end": 53, @@ -53073,7 +57057,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_add_rule_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_patch_sync.py" }, { "canonical": true, @@ -53082,19 +57066,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.clone_rules", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_association", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.CloneRules", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveAssociation", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "CloneRules" + "shortName": "RemoveAssociation" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.CloneRulesRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.RemoveAssociationRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -53122,13 +57106,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "clone_rules" + "shortName": "remove_association" }, - "description": 
"Sample for CloneRules", - "file": "compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py", + "description": "Sample for RemoveAssociation", + "file": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_CloneRules_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveAssociation_sync", "segments": [ { "end": 53, @@ -53161,7 +57145,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_clone_rules_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py" }, { "canonical": true, @@ -53170,19 +57154,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.delete", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Delete", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveRule", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "Delete" + "shortName": "RemoveRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.RemoveRuleRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -53210,13 +57194,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "remove_rule" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_network_firewall_policies_delete_sync.py", + "description": "Sample for 
RemoveRule", + "file": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Delete_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveRule_sync", "segments": [ { "end": 53, @@ -53249,7 +57233,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_delete_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py" }, { "canonical": true, @@ -53258,19 +57242,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_association", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetAssociation", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.SetIamPolicy", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "GetAssociation" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetAssociationRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -53281,9 +57265,13 @@ "type": "str" }, { - "name": "firewall_policy", + "name": "resource", "type": "str" }, + { + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -53297,14 +57285,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicyAssociation", - 
"shortName": "get_association" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for GetAssociation", - "file": "compute_v1_generated_region_network_firewall_policies_get_association_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetAssociation_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -53337,7 +57325,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_association_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py" }, { "canonical": true, @@ -53346,19 +57334,19 @@ "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", "shortName": "RegionNetworkFirewallPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_effective_firewalls", + "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetEffectiveFirewalls", + "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.TestIamPermissions", "service": { "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", "shortName": "RegionNetworkFirewallPolicies" }, - "shortName": "GetEffectiveFirewalls" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetEffectiveFirewallsRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionNetworkFirewallPolicyRequest" }, { "name": "project", @@ -53369,9 +57357,13 @@ "type": "str" }, { - "name": "network", + "name": 
"resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -53385,14 +57377,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.RegionNetworkFirewallPoliciesGetEffectiveFirewallsResponse", - "shortName": "get_effective_firewalls" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for GetEffectiveFirewalls", - "file": "compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetEffectiveFirewalls_sync", + "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -53425,41 +57417,33 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_effective_firewalls_sync.py" + "title": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_iam_policy", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.aggregated_list", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetIamPolicy", + "fullName": 
"google.cloud.compute.v1.RegionNotificationEndpoints.AggregatedList", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" }, - "shortName": "GetIamPolicy" + "shortName": "AggregatedList" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetIamPolicyRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.AggregatedListRegionNotificationEndpointsRequest" }, { "name": "project", "type": "str" }, - { - "name": "region", - "type": "str" - }, - { - "name": "resource", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -53473,22 +57457,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "get_iam_policy" + "resultType": "google.cloud.compute_v1.services.region_notification_endpoints.pagers.AggregatedListPager", + "shortName": "aggregated_list" }, - "description": "Sample for GetIamPolicy", - "file": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py", + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_region_notification_endpoints_aggregated_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_AggregatedList_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -53498,43 +57482,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, 
+ "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_iam_policy_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_aggregated_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get_rule", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.GetRule", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" }, - "shortName": "GetRule" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRuleRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest" }, { "name": "project", @@ -53545,7 +57529,7 @@ "type": "str" }, { - "name": "firewall_policy", + "name": "notification_endpoint", "type": "str" }, { @@ -53561,14 +57545,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicyRule", - "shortName": "get_rule" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for GetRule", - "file": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py", + "description": "Sample for Delete", + "file": 
"compute_v1_generated_region_notification_endpoints_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_GetRule_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Delete_sync", "segments": [ { "end": 53, @@ -53601,28 +57585,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_rule_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.get", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Get", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRegionNotificationEndpointRequest" }, { "name": "project", @@ -53633,7 +57617,7 @@ "type": "str" }, { - "name": "firewall_policy", + "name": "notification_endpoint", "type": "str" }, { @@ -53649,14 +57633,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.FirewallPolicy", + "resultType": "google.cloud.compute_v1.types.NotificationEndpoint", 
"shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_region_network_firewall_policies_get_sync.py", + "file": "compute_v1_generated_region_notification_endpoints_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Get_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Get_sync", "segments": [ { "end": 53, @@ -53689,28 +57673,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_get_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.insert", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.Insert", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" }, "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest" }, { "name": "project", @@ -53721,8 +57705,8 @@ "type": "str" }, { - "name": "firewall_policy_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicy" + "name": "notification_endpoint_resource", + "type": 
"google.cloud.compute_v1.types.NotificationEndpoint" }, { "name": "retry", @@ -53741,10 +57725,10 @@ "shortName": "insert" }, "description": "Sample for Insert", - "file": "compute_v1_generated_region_network_firewall_policies_insert_sync.py", + "file": "compute_v1_generated_region_notification_endpoints_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Insert_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Insert_sync", "segments": [ { "end": 52, @@ -53777,28 +57761,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_insert_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.list", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.List", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.List", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionNetworkFirewallPoliciesRequest" + "type": "google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest" }, { "name": "project", @@ -53821,14 +57805,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - 
"resultType": "google.cloud.compute_v1.services.region_network_firewall_policies.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.region_notification_endpoints.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": "compute_v1_generated_region_network_firewall_policies_list_sync.py", + "file": "compute_v1_generated_region_notification_endpoints_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_List_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_List_sync", "segments": [ { "end": 53, @@ -53861,28 +57845,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_list_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", + "shortName": "RegionNotificationEndpointsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch_rule", + "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.PatchRule", + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", + "shortName": "RegionNotificationEndpoints" }, - "shortName": "PatchRule" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRuleRegionNetworkFirewallPolicyRequest" + "type": 
"google.cloud.compute_v1.types.TestIamPermissionsRegionNotificationEndpointRequest" }, { "name": "project", @@ -53893,12 +57877,12 @@ "type": "str" }, { - "name": "firewall_policy", + "name": "resource", "type": "str" }, { - "name": "firewall_policy_rule_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicyRule" + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" }, { "name": "retry", @@ -53913,14 +57897,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch_rule" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for PatchRule", - "file": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_notification_endpoints_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_PatchRule_sync", + "regionTag": "compute_v1_generated_RegionNotificationEndpoints_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -53953,28 +57937,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_patch_rule_sync.py" + "title": "compute_v1_generated_region_notification_endpoints_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.patch", + "fullName": "google.cloud.compute_v1.RegionOperationsClient.delete", "method": { - "fullName": 
"google.cloud.compute.v1.RegionNetworkFirewallPolicies.Patch", + "fullName": "google.cloud.compute.v1.RegionOperations.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" }, - "shortName": "Patch" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionOperationRequest" }, { "name": "project", @@ -53985,13 +57969,9 @@ "type": "str" }, { - "name": "firewall_policy", + "name": "operation", "type": "str" }, - { - "name": "firewall_policy_resource", - "type": "google.cloud.compute_v1.types.FirewallPolicy" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54005,14 +57985,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.DeleteRegionOperationResponse", + "shortName": "delete" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_network_firewall_policies_patch_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_operations_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_Patch_sync", + "regionTag": "compute_v1_generated_RegionOperations_Delete_sync", "segments": [ { "end": 53, @@ -54045,28 +58025,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_patch_sync.py" + "title": "compute_v1_generated_region_operations_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": 
"RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_association", + "fullName": "google.cloud.compute_v1.RegionOperationsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveAssociation", + "fullName": "google.cloud.compute.v1.RegionOperations.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" }, - "shortName": "RemoveAssociation" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveAssociationRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRegionOperationRequest" }, { "name": "project", @@ -54077,7 +58057,7 @@ "type": "str" }, { - "name": "firewall_policy", + "name": "operation", "type": "str" }, { @@ -54093,14 +58073,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_association" + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "get" }, - "description": "Sample for RemoveAssociation", - "file": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_region_operations_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveAssociation_sync", + "regionTag": "compute_v1_generated_RegionOperations_Get_sync", "segments": [ { "end": 53, @@ -54133,28 +58113,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_remove_association_sync.py" + 
"title": "compute_v1_generated_region_operations_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.remove_rule", + "fullName": "google.cloud.compute_v1.RegionOperationsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.RemoveRule", + "fullName": "google.cloud.compute.v1.RegionOperations.List", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" }, - "shortName": "RemoveRule" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveRuleRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.ListRegionOperationsRequest" }, { "name": "project", @@ -54164,10 +58144,6 @@ "name": "region", "type": "str" }, - { - "name": "firewall_policy", - "type": "str" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54181,14 +58157,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_rule" + "resultType": "google.cloud.compute_v1.services.region_operations.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for RemoveRule", - "file": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_region_operations_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_RemoveRule_sync", + 
"regionTag": "compute_v1_generated_RegionOperations_List_sync", "segments": [ { "end": 53, @@ -54206,43 +58182,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_remove_rule_sync.py" + "title": "compute_v1_generated_region_operations_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionOperationsClient", + "shortName": "RegionOperationsClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.set_iam_policy", + "fullName": "google.cloud.compute_v1.RegionOperationsClient.wait", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.SetIamPolicy", + "fullName": "google.cloud.compute.v1.RegionOperations.Wait", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionOperations", + "shortName": "RegionOperations" }, - "shortName": "SetIamPolicy" + "shortName": "Wait" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetIamPolicyRegionNetworkFirewallPolicyRequest" + "type": "google.cloud.compute_v1.types.WaitRegionOperationRequest" }, { "name": "project", @@ -54253,13 +58229,9 @@ "type": "str" }, { - "name": "resource", + "name": "operation", "type": "str" }, - { - "name": "region_set_policy_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54273,14 +58245,14 @@ "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Policy", - "shortName": "set_iam_policy" + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "wait" }, - "description": "Sample for SetIamPolicy", - "file": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py", + "description": "Sample for Wait", + "file": "compute_v1_generated_region_operations_wait_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_SetIamPolicy_sync", + "regionTag": "compute_v1_generated_RegionOperations_Wait_sync", "segments": [ { "end": 53, @@ -54313,28 +58285,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_set_iam_policy_sync.py" + "title": "compute_v1_generated_region_operations_wait_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient", - "shortName": "RegionNetworkFirewallPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNetworkFirewallPoliciesClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.add_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.AddRule", "service": { - "fullName": "google.cloud.compute.v1.RegionNetworkFirewallPolicies", - "shortName": "RegionNetworkFirewallPolicies" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "TestIamPermissions" + "shortName": "AddRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionNetworkFirewallPolicyRequest" + "type": 
"google.cloud.compute_v1.types.AddRuleRegionSecurityPolicyRequest" }, { "name": "project", @@ -54345,12 +58317,12 @@ "type": "str" }, { - "name": "resource", + "name": "security_policy", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "security_policy_rule_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyRule" }, { "name": "retry", @@ -54365,14 +58337,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "add_rule" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py", + "description": "Sample for AddRule", + "file": "compute_v1_generated_region_security_policies_add_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNetworkFirewallPolicies_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_AddRule_sync", "segments": [ { "end": 53, @@ -54405,33 +58377,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_network_firewall_policies_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_security_policies_add_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", - "shortName": "RegionNotificationEndpointsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.aggregated_list", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.delete", "method": { - "fullName": 
"google.cloud.compute.v1.RegionNotificationEndpoints.AggregatedList", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", - "shortName": "RegionNotificationEndpoints" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "AggregatedList" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListRegionNotificationEndpointsRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, + { + "name": "security_policy", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54445,22 +58425,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_notification_endpoints.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_region_notification_endpoints_aggregated_list_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_region_security_policies_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_AggregatedList_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Delete_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -54470,43 +58450,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 
49, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_aggregated_list_sync.py" + "title": "compute_v1_generated_region_security_policies_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", - "shortName": "RegionNotificationEndpointsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.delete", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.get_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Delete", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.GetRule", "service": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", - "shortName": "RegionNotificationEndpoints" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "Delete" + "shortName": "GetRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionNotificationEndpointRequest" + "type": "google.cloud.compute_v1.types.GetRuleRegionSecurityPolicyRequest" }, { "name": "project", @@ -54517,7 +58497,7 @@ "type": "str" }, { - "name": "notification_endpoint", + "name": "security_policy", "type": "str" }, { @@ -54533,14 +58513,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "resultType": "google.cloud.compute_v1.types.SecurityPolicyRule", + "shortName": "get_rule" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_notification_endpoints_delete_sync.py", + "description": "Sample for GetRule", + "file": "compute_v1_generated_region_security_policies_get_rule_sync.py", "language": 
"PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Delete_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_GetRule_sync", "segments": [ { "end": 53, @@ -54573,28 +58553,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_delete_sync.py" + "title": "compute_v1_generated_region_security_policies_get_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", - "shortName": "RegionNotificationEndpointsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.get", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Get", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", - "shortName": "RegionNotificationEndpoints" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionNotificationEndpointRequest" + "type": "google.cloud.compute_v1.types.GetRegionSecurityPolicyRequest" }, { "name": "project", @@ -54605,7 +58585,7 @@ "type": "str" }, { - "name": "notification_endpoint", + "name": "security_policy", "type": "str" }, { @@ -54621,14 +58601,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.NotificationEndpoint", + "resultType": "google.cloud.compute_v1.types.SecurityPolicy", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_region_notification_endpoints_get_sync.py", + "file": 
"compute_v1_generated_region_security_policies_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Get_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Get_sync", "segments": [ { "end": 53, @@ -54661,28 +58641,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_get_sync.py" + "title": "compute_v1_generated_region_security_policies_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", - "shortName": "RegionNotificationEndpointsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.insert", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.Insert", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", - "shortName": "RegionNotificationEndpoints" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionNotificationEndpointRequest" + "type": "google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest" }, { "name": "project", @@ -54693,8 +58673,8 @@ "type": "str" }, { - "name": "notification_endpoint_resource", - "type": "google.cloud.compute_v1.types.NotificationEndpoint" + "name": "security_policy_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicy" }, { "name": "retry", @@ -54713,10 +58693,10 @@ "shortName": "insert" }, "description": "Sample for Insert", - "file": 
"compute_v1_generated_region_notification_endpoints_insert_sync.py", + "file": "compute_v1_generated_region_security_policies_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_Insert_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Insert_sync", "segments": [ { "end": 52, @@ -54749,28 +58729,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_insert_sync.py" + "title": "compute_v1_generated_region_security_policies_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", - "shortName": "RegionNotificationEndpointsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.list", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.List", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.List", "service": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", - "shortName": "RegionNotificationEndpoints" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionNotificationEndpointsRequest" + "type": "google.cloud.compute_v1.types.ListRegionSecurityPoliciesRequest" }, { "name": "project", @@ -54793,14 +58773,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_notification_endpoints.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.region_security_policies.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", 
- "file": "compute_v1_generated_region_notification_endpoints_list_sync.py", + "file": "compute_v1_generated_region_security_policies_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_List_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_List_sync", "segments": [ { "end": 53, @@ -54833,28 +58813,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_list_sync.py" + "title": "compute_v1_generated_region_security_policies_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient", - "shortName": "RegionNotificationEndpointsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionNotificationEndpointsClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.patch_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints.TestIamPermissions", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.PatchRule", "service": { - "fullName": "google.cloud.compute.v1.RegionNotificationEndpoints", - "shortName": "RegionNotificationEndpoints" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "TestIamPermissions" + "shortName": "PatchRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionNotificationEndpointRequest" + "type": "google.cloud.compute_v1.types.PatchRuleRegionSecurityPolicyRequest" }, { "name": "project", @@ -54865,12 +58845,12 @@ "type": "str" }, { - "name": "resource", + "name": "security_policy", "type": "str" }, { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + 
"name": "security_policy_rule_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicyRule" }, { "name": "retry", @@ -54885,14 +58865,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch_rule" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_region_notification_endpoints_test_iam_permissions_sync.py", + "description": "Sample for PatchRule", + "file": "compute_v1_generated_region_security_policies_patch_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionNotificationEndpoints_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_PatchRule_sync", "segments": [ { "end": 53, @@ -54925,28 +58905,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_notification_endpoints_test_iam_permissions_sync.py" + "title": "compute_v1_generated_region_security_policies_patch_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionOperationsClient", - "shortName": "RegionOperationsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionOperationsClient.delete", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.patch", "method": { - "fullName": "google.cloud.compute.v1.RegionOperations.Delete", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Patch", "service": { - "fullName": "google.cloud.compute.v1.RegionOperations", - "shortName": "RegionOperations" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "Delete" + "shortName": "Patch" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionOperationRequest" + "type": "google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest" }, { "name": "project", @@ -54957,9 +58937,13 @@ "type": "str" }, { - "name": "operation", + "name": "security_policy", "type": "str" }, + { + "name": "security_policy_resource", + "type": "google.cloud.compute_v1.types.SecurityPolicy" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -54973,14 +58957,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.DeleteRegionOperationResponse", - "shortName": "delete" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "patch" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_region_operations_delete_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_security_policies_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionOperations_Delete_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_Patch_sync", "segments": [ { "end": 53, @@ -55013,28 +58997,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_operations_delete_sync.py" + "title": "compute_v1_generated_region_security_policies_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionOperationsClient", - "shortName": "RegionOperationsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionOperationsClient.get", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.remove_rule", "method": { - "fullName": "google.cloud.compute.v1.RegionOperations.Get", + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.RemoveRule", "service": { - "fullName": 
"google.cloud.compute.v1.RegionOperations", - "shortName": "RegionOperations" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "Get" + "shortName": "RemoveRule" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionOperationRequest" + "type": "google.cloud.compute_v1.types.RemoveRuleRegionSecurityPolicyRequest" }, { "name": "project", @@ -55045,7 +59029,7 @@ "type": "str" }, { - "name": "operation", + "name": "security_policy", "type": "str" }, { @@ -55061,14 +59045,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Operation", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "remove_rule" }, - "description": "Sample for Get", - "file": "compute_v1_generated_region_operations_get_sync.py", + "description": "Sample for RemoveRule", + "file": "compute_v1_generated_region_security_policies_remove_rule_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionOperations_Get_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_RemoveRule_sync", "segments": [ { "end": 53, @@ -55101,28 +59085,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_operations_get_sync.py" + "title": "compute_v1_generated_region_security_policies_remove_rule_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionOperationsClient", - "shortName": "RegionOperationsClient" + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", + "shortName": "RegionSecurityPoliciesClient" }, - "fullName": "google.cloud.compute_v1.RegionOperationsClient.list", + "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.RegionOperations.List", + "fullName": 
"google.cloud.compute.v1.RegionSecurityPolicies.SetLabels", "service": { - "fullName": "google.cloud.compute.v1.RegionOperations", - "shortName": "RegionOperations" + "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", + "shortName": "RegionSecurityPolicies" }, - "shortName": "List" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionOperationsRequest" + "type": "google.cloud.compute_v1.types.SetLabelsRegionSecurityPolicyRequest" }, { "name": "project", @@ -55133,92 +59117,12 @@ "type": "str" }, { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, Union[str, bytes]]]" - } - ], - "resultType": "google.cloud.compute_v1.services.region_operations.pagers.ListPager", - "shortName": "list" - }, - "description": "Sample for List", - "file": "compute_v1_generated_region_operations_list_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionOperations_List_sync", - "segments": [ - { - "end": 53, - "start": 27, - "type": "FULL" - }, - { - "end": 53, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 54, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "compute_v1_generated_region_operations_list_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.compute_v1.RegionOperationsClient", - "shortName": "RegionOperationsClient" - }, - "fullName": "google.cloud.compute_v1.RegionOperationsClient.wait", - "method": { - "fullName": "google.cloud.compute.v1.RegionOperations.Wait", - "service": { - "fullName": "google.cloud.compute.v1.RegionOperations", - "shortName": 
"RegionOperations" - }, - "shortName": "Wait" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.compute_v1.types.WaitRegionOperationRequest" - }, - { - "name": "project", - "type": "str" - }, - { - "name": "region", + "name": "resource", "type": "str" }, { - "name": "operation", - "type": "str" + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" }, { "name": "retry", @@ -55233,14 +59137,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Operation", - "shortName": "wait" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "set_labels" }, - "description": "Sample for Wait", - "file": "compute_v1_generated_region_operations_wait_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_region_security_policies_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionOperations_Wait_sync", + "regionTag": "compute_v1_generated_RegionSecurityPolicies_SetLabels_sync", "segments": [ { "end": 53, @@ -55273,28 +59177,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_operations_wait_sync.py" + "title": "compute_v1_generated_region_security_policies_set_labels_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotSettingsClient", + "shortName": "RegionSnapshotSettingsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.add_rule", + "fullName": "google.cloud.compute_v1.RegionSnapshotSettingsClient.get", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.AddRule", + "fullName": "google.cloud.compute.v1.RegionSnapshotSettings.Get", "service": { - "fullName": 
"google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshotSettings", + "shortName": "RegionSnapshotSettings" }, - "shortName": "AddRule" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AddRuleRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRegionSnapshotSettingRequest" }, { "name": "project", @@ -55305,12 +59209,92 @@ "type": "str" }, { - "name": "security_policy", + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.SnapshotSettings", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_region_snapshot_settings_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_RegionSnapshotSettings_Get_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_region_snapshot_settings_get_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.RegionSnapshotSettingsClient", + "shortName": "RegionSnapshotSettingsClient" + }, + "fullName": "google.cloud.compute_v1.RegionSnapshotSettingsClient.patch", + "method": { + "fullName": "google.cloud.compute.v1.RegionSnapshotSettings.Patch", + "service": { + "fullName": "google.cloud.compute.v1.RegionSnapshotSettings", + "shortName": "RegionSnapshotSettings" + }, 
+ "shortName": "Patch" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.PatchRegionSnapshotSettingRequest" + }, + { + "name": "project", "type": "str" }, { - "name": "security_policy_rule_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicyRule" + "name": "region", + "type": "str" + }, + { + "name": "snapshot_settings_resource", + "type": "google.cloud.compute_v1.types.SnapshotSettings" }, { "name": "retry", @@ -55326,21 +59310,21 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "add_rule" + "shortName": "patch" }, - "description": "Sample for AddRule", - "file": "compute_v1_generated_region_security_policies_add_rule_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_region_snapshot_settings_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_AddRule_sync", + "regionTag": "compute_v1_generated_RegionSnapshotSettings_Patch_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -55350,43 +59334,43 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_add_rule_sync.py" + "title": "compute_v1_generated_region_snapshot_settings_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.delete", + 
"fullName": "google.cloud.compute_v1.RegionSnapshotsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Delete", + "fullName": "google.cloud.compute.v1.RegionSnapshots.Delete", "service": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.DeleteRegionSnapshotRequest" }, { "name": "project", @@ -55397,7 +59381,7 @@ "type": "str" }, { - "name": "security_policy", + "name": "snapshot", "type": "str" }, { @@ -55417,10 +59401,10 @@ "shortName": "delete" }, "description": "Sample for Delete", - "file": "compute_v1_generated_region_security_policies_delete_sync.py", + "file": "compute_v1_generated_region_snapshots_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_Delete_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_Delete_sync", "segments": [ { "end": 53, @@ -55453,28 +59437,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_delete_sync.py" + "title": "compute_v1_generated_region_snapshots_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.get_rule", + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient.get_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.GetRule", + "fullName": "google.cloud.compute.v1.RegionSnapshots.GetIamPolicy", 
"service": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, - "shortName": "GetRule" + "shortName": "GetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRuleRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.GetIamPolicyRegionSnapshotRequest" }, { "name": "project", @@ -55485,7 +59469,7 @@ "type": "str" }, { - "name": "security_policy", + "name": "resource", "type": "str" }, { @@ -55501,14 +59485,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.SecurityPolicyRule", - "shortName": "get_rule" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "get_iam_policy" }, - "description": "Sample for GetRule", - "file": "compute_v1_generated_region_security_policies_get_rule_sync.py", + "description": "Sample for GetIamPolicy", + "file": "compute_v1_generated_region_snapshots_get_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_GetRule_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_GetIamPolicy_sync", "segments": [ { "end": 53, @@ -55541,28 +59525,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_get_rule_sync.py" + "title": "compute_v1_generated_region_snapshots_get_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.get", + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient.get", "method": { - "fullName": 
"google.cloud.compute.v1.RegionSecurityPolicies.Get", + "fullName": "google.cloud.compute.v1.RegionSnapshots.Get", "service": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.GetRegionSnapshotRequest" }, { "name": "project", @@ -55573,7 +59557,7 @@ "type": "str" }, { - "name": "security_policy", + "name": "snapshot", "type": "str" }, { @@ -55589,14 +59573,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.SecurityPolicy", + "resultType": "google.cloud.compute_v1.types.Snapshot", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_region_security_policies_get_sync.py", + "file": "compute_v1_generated_region_snapshots_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_Get_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_Get_sync", "segments": [ { "end": 53, @@ -55629,28 +59613,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_get_sync.py" + "title": "compute_v1_generated_region_snapshots_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.insert", + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Insert", + "fullName": 
"google.cloud.compute.v1.RegionSnapshots.Insert", "service": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.InsertRegionSnapshotRequest" }, { "name": "project", @@ -55661,8 +59645,8 @@ "type": "str" }, { - "name": "security_policy_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicy" + "name": "snapshot_resource", + "type": "google.cloud.compute_v1.types.Snapshot" }, { "name": "retry", @@ -55681,10 +59665,10 @@ "shortName": "insert" }, "description": "Sample for Insert", - "file": "compute_v1_generated_region_security_policies_insert_sync.py", + "file": "compute_v1_generated_region_snapshots_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_Insert_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_Insert_sync", "segments": [ { "end": 52, @@ -55717,28 +59701,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_insert_sync.py" + "title": "compute_v1_generated_region_snapshots_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.list", + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient.list", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.List", + "fullName": "google.cloud.compute.v1.RegionSnapshots.List", "service": { - "fullName": 
"google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListRegionSecurityPoliciesRequest" + "type": "google.cloud.compute_v1.types.ListRegionSnapshotsRequest" }, { "name": "project", @@ -55761,14 +59745,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.region_security_policies.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.region_snapshots.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": "compute_v1_generated_region_security_policies_list_sync.py", + "file": "compute_v1_generated_region_snapshots_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_List_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_List_sync", "segments": [ { "end": 53, @@ -55801,28 +59785,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_list_sync.py" + "title": "compute_v1_generated_region_snapshots_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.patch_rule", + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient.set_iam_policy", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.PatchRule", + "fullName": "google.cloud.compute.v1.RegionSnapshots.SetIamPolicy", "service": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": 
"google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, - "shortName": "PatchRule" + "shortName": "SetIamPolicy" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRuleRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.SetIamPolicyRegionSnapshotRequest" }, { "name": "project", @@ -55833,12 +59817,12 @@ "type": "str" }, { - "name": "security_policy", + "name": "resource", "type": "str" }, { - "name": "security_policy_rule_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicyRule" + "name": "region_set_policy_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetPolicyRequest" }, { "name": "retry", @@ -55853,14 +59837,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch_rule" + "resultType": "google.cloud.compute_v1.types.Policy", + "shortName": "set_iam_policy" }, - "description": "Sample for PatchRule", - "file": "compute_v1_generated_region_security_policies_patch_rule_sync.py", + "description": "Sample for SetIamPolicy", + "file": "compute_v1_generated_region_snapshots_set_iam_policy_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_PatchRule_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_SetIamPolicy_sync", "segments": [ { "end": 53, @@ -55893,28 +59877,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_patch_rule_sync.py" + "title": "compute_v1_generated_region_snapshots_set_iam_policy_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": 
"google.cloud.compute_v1.RegionSecurityPoliciesClient.patch", + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.Patch", + "fullName": "google.cloud.compute.v1.RegionSnapshots.SetLabels", "service": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, - "shortName": "Patch" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.SetLabelsRegionSnapshotRequest" }, { "name": "project", @@ -55925,12 +59909,12 @@ "type": "str" }, { - "name": "security_policy", + "name": "resource", "type": "str" }, { - "name": "security_policy_resource", - "type": "google.cloud.compute_v1.types.SecurityPolicy" + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" }, { "name": "retry", @@ -55946,13 +59930,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "shortName": "set_labels" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_region_security_policies_patch_sync.py", + "description": "Sample for SetLabels", + "file": "compute_v1_generated_region_snapshots_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_Patch_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_SetLabels_sync", "segments": [ { "end": 53, @@ -55985,28 +59969,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_patch_sync.py" + "title": "compute_v1_generated_region_snapshots_set_labels_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": 
"google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.remove_rule", + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient.test_iam_permissions", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.RemoveRule", + "fullName": "google.cloud.compute.v1.RegionSnapshots.TestIamPermissions", "service": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, - "shortName": "RemoveRule" + "shortName": "TestIamPermissions" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.RemoveRuleRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.TestIamPermissionsRegionSnapshotRequest" }, { "name": "project", @@ -56017,9 +60001,13 @@ "type": "str" }, { - "name": "security_policy", + "name": "resource", "type": "str" }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -56033,14 +60021,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "remove_rule" + "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" }, - "description": "Sample for RemoveRule", - "file": "compute_v1_generated_region_security_policies_remove_rule_sync.py", + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_region_snapshots_test_iam_permissions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"compute_v1_generated_RegionSecurityPolicies_RemoveRule_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_TestIamPermissions_sync", "segments": [ { "end": 53, @@ -56073,28 +60061,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_remove_rule_sync.py" + "title": "compute_v1_generated_region_snapshots_test_iam_permissions_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient", - "shortName": "RegionSecurityPoliciesClient" + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient", + "shortName": "RegionSnapshotsClient" }, - "fullName": "google.cloud.compute_v1.RegionSecurityPoliciesClient.set_labels", + "fullName": "google.cloud.compute_v1.RegionSnapshotsClient.update_kms_key", "method": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies.SetLabels", + "fullName": "google.cloud.compute.v1.RegionSnapshots.UpdateKmsKey", "service": { - "fullName": "google.cloud.compute.v1.RegionSecurityPolicies", - "shortName": "RegionSecurityPolicies" + "fullName": "google.cloud.compute.v1.RegionSnapshots", + "shortName": "RegionSnapshots" }, - "shortName": "SetLabels" + "shortName": "UpdateKmsKey" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsRegionSecurityPolicyRequest" + "type": "google.cloud.compute_v1.types.UpdateKmsKeyRegionSnapshotRequest" }, { "name": "project", @@ -56105,12 +60093,12 @@ "type": "str" }, { - "name": "resource", + "name": "snapshot", "type": "str" }, { - "name": "region_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + "name": "region_snapshot_update_kms_key_request_resource", + "type": "google.cloud.compute_v1.types.RegionSnapshotUpdateKmsKeyRequest" }, { "name": "retry", @@ -56126,13 +60114,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "shortName": 
"update_kms_key" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_region_security_policies_set_labels_sync.py", + "description": "Sample for UpdateKmsKey", + "file": "compute_v1_generated_region_snapshots_update_kms_key_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_RegionSecurityPolicies_SetLabels_sync", + "regionTag": "compute_v1_generated_RegionSnapshots_UpdateKmsKey_sync", "segments": [ { "end": 53, @@ -56165,7 +60153,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_region_security_policies_set_labels_sync.py" + "title": "compute_v1_generated_region_snapshots_update_kms_key_sync.py" }, { "canonical": true, @@ -67343,6 +71331,94 @@ ], "title": "compute_v1_generated_snapshots_test_iam_permissions_sync.py" }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.SnapshotsClient", + "shortName": "SnapshotsClient" + }, + "fullName": "google.cloud.compute_v1.SnapshotsClient.update_kms_key", + "method": { + "fullName": "google.cloud.compute.v1.Snapshots.UpdateKmsKey", + "service": { + "fullName": "google.cloud.compute.v1.Snapshots", + "shortName": "Snapshots" + }, + "shortName": "UpdateKmsKey" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.UpdateKmsKeySnapshotRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "snapshot", + "type": "str" + }, + { + "name": "snapshot_update_kms_key_request_resource", + "type": "google.cloud.compute_v1.types.SnapshotUpdateKmsKeyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update_kms_key" + }, + "description": "Sample for UpdateKmsKey", + "file": 
"compute_v1_generated_snapshots_update_kms_key_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_Snapshots_UpdateKmsKey_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_snapshots_update_kms_key_sync.py" + }, { "canonical": true, "clientMethod": { @@ -77761,28 +81837,376 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_vpn_gateways_set_labels_sync.py" + "title": "compute_v1_generated_vpn_gateways_set_labels_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnGatewaysClient", + "shortName": "VpnGatewaysClient" + }, + "fullName": "google.cloud.compute_v1.VpnGatewaysClient.test_iam_permissions", + "method": { + "fullName": "google.cloud.compute.v1.VpnGateways.TestIamPermissions", + "service": { + "fullName": "google.cloud.compute.v1.VpnGateways", + "shortName": "VpnGateways" + }, + "shortName": "TestIamPermissions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.TestIamPermissionsVpnGatewayRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "resource", + "type": "str" + }, + { + "name": "test_permissions_request_resource", + "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.cloud.compute_v1.types.TestPermissionsResponse", + "shortName": "test_iam_permissions" + }, + "description": "Sample for TestIamPermissions", + "file": "compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnGateways_TestIamPermissions_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.aggregated_list", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.AggregatedList", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "AggregatedList" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.AggregatedListVpnTunnelsRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.services.vpn_tunnels.pagers.AggregatedListPager", + "shortName": "aggregated_list" + }, + "description": "Sample for AggregatedList", + "file": "compute_v1_generated_vpn_tunnels_aggregated_list_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"compute_v1_generated_VpnTunnels_AggregatedList_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_aggregated_list_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.delete", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.Delete", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "Delete" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.DeleteVpnTunnelRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_tunnel", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" + }, + "description": "Sample for Delete", + "file": "compute_v1_generated_vpn_tunnels_delete_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnTunnels_Delete_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_delete_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" + }, + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.get", + "method": { + "fullName": "google.cloud.compute.v1.VpnTunnels.Get", + "service": { + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" + }, + "shortName": "Get" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.compute_v1.types.GetVpnTunnelRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "region", + "type": "str" + }, + { + "name": "vpn_tunnel", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.compute_v1.types.VpnTunnel", + "shortName": "get" + }, + "description": "Sample for Get", + "file": "compute_v1_generated_vpn_tunnels_get_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_VpnTunnels_Get_sync", + "segments": [ + { + "end": 53, + "start": 27, + "type": "FULL" + }, + { + "end": 53, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 47, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 50, + "start": 48, + "type": "REQUEST_EXECUTION" + }, + { + "end": 54, + "start": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_vpn_tunnels_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.VpnGatewaysClient", - "shortName": "VpnGatewaysClient" + "fullName": 
"google.cloud.compute_v1.VpnTunnelsClient", + "shortName": "VpnTunnelsClient" }, - "fullName": "google.cloud.compute_v1.VpnGatewaysClient.test_iam_permissions", + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.VpnGateways.TestIamPermissions", + "fullName": "google.cloud.compute.v1.VpnTunnels.Insert", "service": { - "fullName": "google.cloud.compute.v1.VpnGateways", - "shortName": "VpnGateways" + "fullName": "google.cloud.compute.v1.VpnTunnels", + "shortName": "VpnTunnels" }, - "shortName": "TestIamPermissions" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.TestIamPermissionsVpnGatewayRequest" + "type": "google.cloud.compute_v1.types.InsertVpnTunnelRequest" }, { "name": "project", @@ -77793,12 +82217,8 @@ "type": "str" }, { - "name": "resource", - "type": "str" - }, - { - "name": "test_permissions_request_resource", - "type": "google.cloud.compute_v1.types.TestPermissionsRequest" + "name": "vpn_tunnel_resource", + "type": "google.cloud.compute_v1.types.VpnTunnel" }, { "name": "retry", @@ -77813,22 +82233,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.TestPermissionsResponse", - "shortName": "test_iam_permissions" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for TestIamPermissions", - "file": "compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_vpn_tunnels_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_VpnGateways_TestIamPermissions_sync", + "regionTag": "compute_v1_generated_VpnTunnels_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -77838,22 +82258,22 @@ "type": 
"CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 54, - "start": 51, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_vpn_gateways_test_iam_permissions_sync.py" + "title": "compute_v1_generated_vpn_tunnels_insert_sync.py" }, { "canonical": true, @@ -77862,24 +82282,28 @@ "fullName": "google.cloud.compute_v1.VpnTunnelsClient", "shortName": "VpnTunnelsClient" }, - "fullName": "google.cloud.compute_v1.VpnTunnelsClient.aggregated_list", + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.list", "method": { - "fullName": "google.cloud.compute.v1.VpnTunnels.AggregatedList", + "fullName": "google.cloud.compute.v1.VpnTunnels.List", "service": { "fullName": "google.cloud.compute.v1.VpnTunnels", "shortName": "VpnTunnels" }, - "shortName": "AggregatedList" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.AggregatedListVpnTunnelsRequest" + "type": "google.cloud.compute_v1.types.ListVpnTunnelsRequest" }, { "name": "project", "type": "str" }, + { + "name": "region", + "type": "str" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -77893,22 +82317,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.vpn_tunnels.pagers.AggregatedListPager", - "shortName": "aggregated_list" + "resultType": "google.cloud.compute_v1.services.vpn_tunnels.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for AggregatedList", - "file": "compute_v1_generated_vpn_tunnels_aggregated_list_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_vpn_tunnels_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_VpnTunnels_AggregatedList_sync", + "regionTag": 
"compute_v1_generated_VpnTunnels_List_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -77918,22 +82342,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 54, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_vpn_tunnels_aggregated_list_sync.py" + "title": "compute_v1_generated_vpn_tunnels_list_sync.py" }, { "canonical": true, @@ -77942,19 +82366,19 @@ "fullName": "google.cloud.compute_v1.VpnTunnelsClient", "shortName": "VpnTunnelsClient" }, - "fullName": "google.cloud.compute_v1.VpnTunnelsClient.delete", + "fullName": "google.cloud.compute_v1.VpnTunnelsClient.set_labels", "method": { - "fullName": "google.cloud.compute.v1.VpnTunnels.Delete", + "fullName": "google.cloud.compute.v1.VpnTunnels.SetLabels", "service": { "fullName": "google.cloud.compute.v1.VpnTunnels", "shortName": "VpnTunnels" }, - "shortName": "Delete" + "shortName": "SetLabels" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteVpnTunnelRequest" + "type": "google.cloud.compute_v1.types.SetLabelsVpnTunnelRequest" }, { "name": "project", @@ -77965,9 +82389,13 @@ "type": "str" }, { - "name": "vpn_tunnel", + "name": "resource", "type": "str" }, + { + "name": "region_set_labels_request_resource", + "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -77982,13 +82410,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "set_labels" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_vpn_tunnels_delete_sync.py", + "description": "Sample for SetLabels", + "file": 
"compute_v1_generated_vpn_tunnels_set_labels_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_VpnTunnels_Delete_sync", + "regionTag": "compute_v1_generated_VpnTunnels_SetLabels_sync", "segments": [ { "end": 53, @@ -78021,39 +82449,39 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_vpn_tunnels_delete_sync.py" + "title": "compute_v1_generated_vpn_tunnels_set_labels_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.VpnTunnelsClient", - "shortName": "VpnTunnelsClient" + "fullName": "google.cloud.compute_v1.WireGroupsClient", + "shortName": "WireGroupsClient" }, - "fullName": "google.cloud.compute_v1.VpnTunnelsClient.get", + "fullName": "google.cloud.compute_v1.WireGroupsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.VpnTunnels.Get", + "fullName": "google.cloud.compute.v1.WireGroups.Delete", "service": { - "fullName": "google.cloud.compute.v1.VpnTunnels", - "shortName": "VpnTunnels" + "fullName": "google.cloud.compute.v1.WireGroups", + "shortName": "WireGroups" }, - "shortName": "Get" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetVpnTunnelRequest" + "type": "google.cloud.compute_v1.types.DeleteWireGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "cross_site_network", "type": "str" }, { - "name": "vpn_tunnel", + "name": "wire_group", "type": "str" }, { @@ -78069,14 +82497,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.VpnTunnel", - "shortName": "get" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "delete" }, - "description": "Sample for Get", - "file": "compute_v1_generated_vpn_tunnels_get_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_wire_groups_delete_sync.py", "language": "PYTHON", "origin": 
"API_DEFINITION", - "regionTag": "compute_v1_generated_VpnTunnels_Get_sync", + "regionTag": "compute_v1_generated_WireGroups_Delete_sync", "segments": [ { "end": 53, @@ -78109,40 +82537,40 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_vpn_tunnels_get_sync.py" + "title": "compute_v1_generated_wire_groups_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.VpnTunnelsClient", - "shortName": "VpnTunnelsClient" + "fullName": "google.cloud.compute_v1.WireGroupsClient", + "shortName": "WireGroupsClient" }, - "fullName": "google.cloud.compute_v1.VpnTunnelsClient.insert", + "fullName": "google.cloud.compute_v1.WireGroupsClient.get", "method": { - "fullName": "google.cloud.compute.v1.VpnTunnels.Insert", + "fullName": "google.cloud.compute.v1.WireGroups.Get", "service": { - "fullName": "google.cloud.compute.v1.VpnTunnels", - "shortName": "VpnTunnels" + "fullName": "google.cloud.compute.v1.WireGroups", + "shortName": "WireGroups" }, - "shortName": "Insert" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertVpnTunnelRequest" + "type": "google.cloud.compute_v1.types.GetWireGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "cross_site_network", "type": "str" }, { - "name": "vpn_tunnel_resource", - "type": "google.cloud.compute_v1.types.VpnTunnel" + "name": "wire_group", + "type": "str" }, { "name": "retry", @@ -78157,22 +82585,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.WireGroup", + "shortName": "get" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_vpn_tunnels_insert_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_wire_groups_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - 
"regionTag": "compute_v1_generated_VpnTunnels_Insert_sync", + "regionTag": "compute_v1_generated_WireGroups_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -78182,52 +82610,56 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_vpn_tunnels_insert_sync.py" + "title": "compute_v1_generated_wire_groups_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.VpnTunnelsClient", - "shortName": "VpnTunnelsClient" + "fullName": "google.cloud.compute_v1.WireGroupsClient", + "shortName": "WireGroupsClient" }, - "fullName": "google.cloud.compute_v1.VpnTunnelsClient.list", + "fullName": "google.cloud.compute_v1.WireGroupsClient.insert", "method": { - "fullName": "google.cloud.compute.v1.VpnTunnels.List", + "fullName": "google.cloud.compute.v1.WireGroups.Insert", "service": { - "fullName": "google.cloud.compute.v1.VpnTunnels", - "shortName": "VpnTunnels" + "fullName": "google.cloud.compute.v1.WireGroups", + "shortName": "WireGroups" }, - "shortName": "List" + "shortName": "Insert" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListVpnTunnelsRequest" + "type": "google.cloud.compute_v1.types.InsertWireGroupRequest" }, { "name": "project", "type": "str" }, { - "name": "region", + "name": "cross_site_network", "type": "str" }, + { + "name": "wire_group_resource", + "type": "google.cloud.compute_v1.types.WireGroup" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -78241,22 +82673,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.compute_v1.services.vpn_tunnels.pagers.ListPager", - "shortName": "list" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" }, - "description": "Sample for List", - "file": "compute_v1_generated_vpn_tunnels_list_sync.py", + "description": "Sample for Insert", + "file": "compute_v1_generated_wire_groups_insert_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_VpnTunnels_List_sync", + "regionTag": "compute_v1_generated_WireGroups_Insert_sync", "segments": [ { - "end": 53, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 53, + "end": 52, "start": 27, "type": "SHORT" }, @@ -78276,50 +82708,42 @@ "type": "REQUEST_EXECUTION" }, { - "end": 54, + "end": 53, "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_vpn_tunnels_list_sync.py" + "title": "compute_v1_generated_wire_groups_insert_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.VpnTunnelsClient", - "shortName": "VpnTunnelsClient" + "fullName": "google.cloud.compute_v1.WireGroupsClient", + "shortName": "WireGroupsClient" }, - "fullName": "google.cloud.compute_v1.VpnTunnelsClient.set_labels", + "fullName": "google.cloud.compute_v1.WireGroupsClient.list", "method": { - "fullName": "google.cloud.compute.v1.VpnTunnels.SetLabels", + "fullName": "google.cloud.compute.v1.WireGroups.List", "service": { - "fullName": "google.cloud.compute.v1.VpnTunnels", - "shortName": "VpnTunnels" + "fullName": "google.cloud.compute.v1.WireGroups", + "shortName": "WireGroups" }, - "shortName": "SetLabels" + "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.SetLabelsVpnTunnelRequest" + "type": "google.cloud.compute_v1.types.ListWireGroupsRequest" }, { "name": "project", "type": "str" }, { - "name": "region", - "type": "str" - }, - { - "name": "resource", + "name": "cross_site_network", "type": "str" }, - { 
- "name": "region_set_labels_request_resource", - "type": "google.cloud.compute_v1.types.RegionSetLabelsRequest" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -78333,14 +82757,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "set_labels" + "resultType": "google.cloud.compute_v1.services.wire_groups.pagers.ListPager", + "shortName": "list" }, - "description": "Sample for SetLabels", - "file": "compute_v1_generated_vpn_tunnels_set_labels_sync.py", + "description": "Sample for List", + "file": "compute_v1_generated_wire_groups_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_VpnTunnels_SetLabels_sync", + "regionTag": "compute_v1_generated_WireGroups_List_sync", "segments": [ { "end": 53, @@ -78358,22 +82782,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 47, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 50, - "start": 48, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { "end": 54, - "start": 51, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_vpn_tunnels_set_labels_sync.py" + "title": "compute_v1_generated_wire_groups_list_sync.py" }, { "canonical": true, @@ -78382,19 +82806,19 @@ "fullName": "google.cloud.compute_v1.WireGroupsClient", "shortName": "WireGroupsClient" }, - "fullName": "google.cloud.compute_v1.WireGroupsClient.delete", + "fullName": "google.cloud.compute_v1.WireGroupsClient.patch", "method": { - "fullName": "google.cloud.compute.v1.WireGroups.Delete", + "fullName": "google.cloud.compute.v1.WireGroups.Patch", "service": { "fullName": "google.cloud.compute.v1.WireGroups", "shortName": "WireGroups" }, - "shortName": "Delete" + "shortName": "Patch" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteWireGroupRequest" + "type": "google.cloud.compute_v1.types.PatchWireGroupRequest" 
}, { "name": "project", @@ -78408,6 +82832,10 @@ "name": "wire_group", "type": "str" }, + { + "name": "wire_group_resource", + "type": "google.cloud.compute_v1.types.WireGroup" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -78422,13 +82850,13 @@ } ], "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "delete" + "shortName": "patch" }, - "description": "Sample for Delete", - "file": "compute_v1_generated_wire_groups_delete_sync.py", + "description": "Sample for Patch", + "file": "compute_v1_generated_wire_groups_patch_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_WireGroups_Delete_sync", + "regionTag": "compute_v1_generated_WireGroups_Patch_sync", "segments": [ { "end": 53, @@ -78461,39 +82889,39 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_wire_groups_delete_sync.py" + "title": "compute_v1_generated_wire_groups_patch_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.WireGroupsClient", - "shortName": "WireGroupsClient" + "fullName": "google.cloud.compute_v1.ZoneOperationsClient", + "shortName": "ZoneOperationsClient" }, - "fullName": "google.cloud.compute_v1.WireGroupsClient.get", + "fullName": "google.cloud.compute_v1.ZoneOperationsClient.delete", "method": { - "fullName": "google.cloud.compute.v1.WireGroups.Get", + "fullName": "google.cloud.compute.v1.ZoneOperations.Delete", "service": { - "fullName": "google.cloud.compute.v1.WireGroups", - "shortName": "WireGroups" + "fullName": "google.cloud.compute.v1.ZoneOperations", + "shortName": "ZoneOperations" }, - "shortName": "Get" + "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetWireGroupRequest" + "type": "google.cloud.compute_v1.types.DeleteZoneOperationRequest" }, { "name": "project", "type": "str" }, { - "name": "cross_site_network", + "name": "zone", "type": "str" }, { - "name": 
"wire_group", + "name": "operation", "type": "str" }, { @@ -78509,14 +82937,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.WireGroup", - "shortName": "get" + "resultType": "google.cloud.compute_v1.types.DeleteZoneOperationResponse", + "shortName": "delete" }, - "description": "Sample for Get", - "file": "compute_v1_generated_wire_groups_get_sync.py", + "description": "Sample for Delete", + "file": "compute_v1_generated_zone_operations_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_WireGroups_Get_sync", + "regionTag": "compute_v1_generated_ZoneOperations_Delete_sync", "segments": [ { "end": 53, @@ -78549,40 +82977,40 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_wire_groups_get_sync.py" + "title": "compute_v1_generated_zone_operations_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.WireGroupsClient", - "shortName": "WireGroupsClient" + "fullName": "google.cloud.compute_v1.ZoneOperationsClient", + "shortName": "ZoneOperationsClient" }, - "fullName": "google.cloud.compute_v1.WireGroupsClient.insert", + "fullName": "google.cloud.compute_v1.ZoneOperationsClient.get", "method": { - "fullName": "google.cloud.compute.v1.WireGroups.Insert", + "fullName": "google.cloud.compute.v1.ZoneOperations.Get", "service": { - "fullName": "google.cloud.compute.v1.WireGroups", - "shortName": "WireGroups" + "fullName": "google.cloud.compute.v1.ZoneOperations", + "shortName": "ZoneOperations" }, - "shortName": "Insert" + "shortName": "Get" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.InsertWireGroupRequest" + "type": "google.cloud.compute_v1.types.GetZoneOperationRequest" }, { "name": "project", "type": "str" }, { - "name": "cross_site_network", + "name": "zone", "type": "str" }, { - "name": "wire_group_resource", - "type": 
"google.cloud.compute_v1.types.WireGroup" + "name": "operation", + "type": "str" }, { "name": "retry", @@ -78597,22 +83025,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "insert" + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "get" }, - "description": "Sample for Insert", - "file": "compute_v1_generated_wire_groups_insert_sync.py", + "description": "Sample for Get", + "file": "compute_v1_generated_zone_operations_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_WireGroups_Insert_sync", + "regionTag": "compute_v1_generated_ZoneOperations_Get_sync", "segments": [ { - "end": 52, + "end": 53, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 53, "start": 27, "type": "SHORT" }, @@ -78622,50 +83050,50 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 47, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 50, + "start": 48, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 50, + "end": 54, + "start": 51, "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_wire_groups_insert_sync.py" + "title": "compute_v1_generated_zone_operations_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.WireGroupsClient", - "shortName": "WireGroupsClient" + "fullName": "google.cloud.compute_v1.ZoneOperationsClient", + "shortName": "ZoneOperationsClient" }, - "fullName": "google.cloud.compute_v1.WireGroupsClient.list", + "fullName": "google.cloud.compute_v1.ZoneOperationsClient.list", "method": { - "fullName": "google.cloud.compute.v1.WireGroups.List", + "fullName": "google.cloud.compute.v1.ZoneOperations.List", "service": { - "fullName": "google.cloud.compute.v1.WireGroups", - "shortName": "WireGroups" + "fullName": "google.cloud.compute.v1.ZoneOperations", + "shortName": "ZoneOperations" 
}, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListWireGroupsRequest" + "type": "google.cloud.compute_v1.types.ListZoneOperationsRequest" }, { "name": "project", "type": "str" }, { - "name": "cross_site_network", + "name": "zone", "type": "str" }, { @@ -78681,14 +83109,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.wire_groups.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.zone_operations.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": "compute_v1_generated_wire_groups_list_sync.py", + "file": "compute_v1_generated_zone_operations_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_WireGroups_List_sync", + "regionTag": "compute_v1_generated_ZoneOperations_List_sync", "segments": [ { "end": 53, @@ -78721,45 +83149,41 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_wire_groups_list_sync.py" + "title": "compute_v1_generated_zone_operations_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.WireGroupsClient", - "shortName": "WireGroupsClient" + "fullName": "google.cloud.compute_v1.ZoneOperationsClient", + "shortName": "ZoneOperationsClient" }, - "fullName": "google.cloud.compute_v1.WireGroupsClient.patch", + "fullName": "google.cloud.compute_v1.ZoneOperationsClient.wait", "method": { - "fullName": "google.cloud.compute.v1.WireGroups.Patch", + "fullName": "google.cloud.compute.v1.ZoneOperations.Wait", "service": { - "fullName": "google.cloud.compute.v1.WireGroups", - "shortName": "WireGroups" + "fullName": "google.cloud.compute.v1.ZoneOperations", + "shortName": "ZoneOperations" }, - "shortName": "Patch" + "shortName": "Wait" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.PatchWireGroupRequest" + "type": 
"google.cloud.compute_v1.types.WaitZoneOperationRequest" }, { "name": "project", "type": "str" }, { - "name": "cross_site_network", + "name": "zone", "type": "str" }, { - "name": "wire_group", + "name": "operation", "type": "str" }, - { - "name": "wire_group_resource", - "type": "google.cloud.compute_v1.types.WireGroup" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -78773,14 +83197,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.api_core.extended_operation.ExtendedOperation", - "shortName": "patch" + "resultType": "google.cloud.compute_v1.types.Operation", + "shortName": "wait" }, - "description": "Sample for Patch", - "file": "compute_v1_generated_wire_groups_patch_sync.py", + "description": "Sample for Wait", + "file": "compute_v1_generated_zone_operations_wait_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_WireGroups_Patch_sync", + "regionTag": "compute_v1_generated_ZoneOperations_Wait_sync", "segments": [ { "end": 53, @@ -78813,28 +83237,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_wire_groups_patch_sync.py" + "title": "compute_v1_generated_zone_operations_wait_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ZoneOperationsClient", - "shortName": "ZoneOperationsClient" + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient", + "shortName": "ZoneVmExtensionPoliciesClient" }, - "fullName": "google.cloud.compute_v1.ZoneOperationsClient.delete", + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.delete", "method": { - "fullName": "google.cloud.compute.v1.ZoneOperations.Delete", + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies.Delete", "service": { - "fullName": "google.cloud.compute.v1.ZoneOperations", - "shortName": "ZoneOperations" + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "shortName": "ZoneVmExtensionPolicies" 
}, "shortName": "Delete" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.DeleteZoneOperationRequest" + "type": "google.cloud.compute_v1.types.DeleteZoneVmExtensionPolicyRequest" }, { "name": "project", @@ -78845,7 +83269,7 @@ "type": "str" }, { - "name": "operation", + "name": "vm_extension_policy", "type": "str" }, { @@ -78861,14 +83285,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.DeleteZoneOperationResponse", + "resultType": "google.api_core.extended_operation.ExtendedOperation", "shortName": "delete" }, "description": "Sample for Delete", - "file": "compute_v1_generated_zone_operations_delete_sync.py", + "file": "compute_v1_generated_zone_vm_extension_policies_delete_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_ZoneOperations_Delete_sync", + "regionTag": "compute_v1_generated_ZoneVmExtensionPolicies_Delete_sync", "segments": [ { "end": 53, @@ -78901,28 +83325,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_zone_operations_delete_sync.py" + "title": "compute_v1_generated_zone_vm_extension_policies_delete_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ZoneOperationsClient", - "shortName": "ZoneOperationsClient" + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient", + "shortName": "ZoneVmExtensionPoliciesClient" }, - "fullName": "google.cloud.compute_v1.ZoneOperationsClient.get", + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.get", "method": { - "fullName": "google.cloud.compute.v1.ZoneOperations.Get", + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies.Get", "service": { - "fullName": "google.cloud.compute.v1.ZoneOperations", - "shortName": "ZoneOperations" + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "shortName": "ZoneVmExtensionPolicies" }, "shortName": "Get" }, 
"parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.GetZoneOperationRequest" + "type": "google.cloud.compute_v1.types.GetZoneVmExtensionPolicyRequest" }, { "name": "project", @@ -78933,7 +83357,7 @@ "type": "str" }, { - "name": "operation", + "name": "vm_extension_policy", "type": "str" }, { @@ -78949,14 +83373,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Operation", + "resultType": "google.cloud.compute_v1.types.VmExtensionPolicy", "shortName": "get" }, "description": "Sample for Get", - "file": "compute_v1_generated_zone_operations_get_sync.py", + "file": "compute_v1_generated_zone_vm_extension_policies_get_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_ZoneOperations_Get_sync", + "regionTag": "compute_v1_generated_ZoneVmExtensionPolicies_Get_sync", "segments": [ { "end": 53, @@ -78989,28 +83413,116 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_zone_operations_get_sync.py" + "title": "compute_v1_generated_zone_vm_extension_policies_get_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ZoneOperationsClient", - "shortName": "ZoneOperationsClient" + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient", + "shortName": "ZoneVmExtensionPoliciesClient" }, - "fullName": "google.cloud.compute_v1.ZoneOperationsClient.list", + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.insert", "method": { - "fullName": "google.cloud.compute.v1.ZoneOperations.List", + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies.Insert", "service": { - "fullName": "google.cloud.compute.v1.ZoneOperations", - "shortName": "ZoneOperations" + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "shortName": "ZoneVmExtensionPolicies" + }, + "shortName": "Insert" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.compute_v1.types.InsertZoneVmExtensionPolicyRequest" + }, + { + "name": "project", + "type": "str" + }, + { + "name": "zone", + "type": "str" + }, + { + "name": "vm_extension_policy_resource", + "type": "google.cloud.compute_v1.types.VmExtensionPolicy" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "insert" + }, + "description": "Sample for Insert", + "file": "compute_v1_generated_zone_vm_extension_policies_insert_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "compute_v1_generated_ZoneVmExtensionPolicies_Insert_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "compute_v1_generated_zone_vm_extension_policies_insert_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient", + "shortName": "ZoneVmExtensionPoliciesClient" + }, + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.list", + "method": { + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies.List", + "service": { + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "shortName": "ZoneVmExtensionPolicies" }, "shortName": "List" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.ListZoneOperationsRequest" + "type": "google.cloud.compute_v1.types.ListZoneVmExtensionPoliciesRequest" }, { "name": 
"project", @@ -79033,14 +83545,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.services.zone_operations.pagers.ListPager", + "resultType": "google.cloud.compute_v1.services.zone_vm_extension_policies.pagers.ListPager", "shortName": "list" }, "description": "Sample for List", - "file": "compute_v1_generated_zone_operations_list_sync.py", + "file": "compute_v1_generated_zone_vm_extension_policies_list_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_ZoneOperations_List_sync", + "regionTag": "compute_v1_generated_ZoneVmExtensionPolicies_List_sync", "segments": [ { "end": 53, @@ -79073,28 +83585,28 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_zone_operations_list_sync.py" + "title": "compute_v1_generated_zone_vm_extension_policies_list_sync.py" }, { "canonical": true, "clientMethod": { "client": { - "fullName": "google.cloud.compute_v1.ZoneOperationsClient", - "shortName": "ZoneOperationsClient" + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient", + "shortName": "ZoneVmExtensionPoliciesClient" }, - "fullName": "google.cloud.compute_v1.ZoneOperationsClient.wait", + "fullName": "google.cloud.compute_v1.ZoneVmExtensionPoliciesClient.update", "method": { - "fullName": "google.cloud.compute.v1.ZoneOperations.Wait", + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies.Update", "service": { - "fullName": "google.cloud.compute.v1.ZoneOperations", - "shortName": "ZoneOperations" + "fullName": "google.cloud.compute.v1.ZoneVmExtensionPolicies", + "shortName": "ZoneVmExtensionPolicies" }, - "shortName": "Wait" + "shortName": "Update" }, "parameters": [ { "name": "request", - "type": "google.cloud.compute_v1.types.WaitZoneOperationRequest" + "type": "google.cloud.compute_v1.types.UpdateZoneVmExtensionPolicyRequest" }, { "name": "project", @@ -79105,9 +83617,13 @@ "type": "str" }, { - "name": "operation", + "name": 
"vm_extension_policy", "type": "str" }, + { + "name": "vm_extension_policy_resource", + "type": "google.cloud.compute_v1.types.VmExtensionPolicy" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -79121,14 +83637,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.compute_v1.types.Operation", - "shortName": "wait" + "resultType": "google.api_core.extended_operation.ExtendedOperation", + "shortName": "update" }, - "description": "Sample for Wait", - "file": "compute_v1_generated_zone_operations_wait_sync.py", + "description": "Sample for Update", + "file": "compute_v1_generated_zone_vm_extension_policies_update_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "compute_v1_generated_ZoneOperations_Wait_sync", + "regionTag": "compute_v1_generated_ZoneVmExtensionPolicies_Update_sync", "segments": [ { "end": 53, @@ -79161,7 +83677,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "compute_v1_generated_zone_operations_wait_sync.py" + "title": "compute_v1_generated_zone_vm_extension_policies_update_sync.py" }, { "canonical": true, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py index 4cd27b01ac5f..e9ed3e36460c 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_buckets.py @@ -1562,6 +1562,284 @@ def test_add_signed_url_key_unary_rest_flattened_error(transport: str = "rest"): ) +def test_aggregated_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # 
Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.aggregated_list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.aggregated_list] = mock_rpc + + request = {} + client.aggregated_list(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.aggregated_list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_aggregated_list_rest_required_fields( + request_type=compute.AggregatedListBackendBucketsRequest, +): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregated_list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).aggregated_list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "include_all_scopes", + "max_results", + "order_by", + "page_token", + "return_partial_success", + "service_project_number", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucketAggregatedList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucketAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.aggregated_list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_aggregated_list_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.aggregated_list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "includeAllScopes", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + "serviceProjectNumber", + ) + ) + & set(("project",)) + ) + + +def test_aggregated_list_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucketAggregatedList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.BackendBucketAggregatedList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.aggregated_list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/aggregated/backendBuckets" + % client.transport._host, + args[1], + ) + + +def test_aggregated_list_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.aggregated_list( + compute.AggregatedListBackendBucketsRequest(), + project="project_value", + ) + + +def test_aggregated_list_rest_pager(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendBucketAggregatedList( + items={ + "a": compute.BackendBucketsScopedList(), + "b": compute.BackendBucketsScopedList(), + "c": compute.BackendBucketsScopedList(), + }, + next_page_token="abc", + ), + compute.BackendBucketAggregatedList( + items={}, + next_page_token="def", + ), + compute.BackendBucketAggregatedList( + items={ + "g": compute.BackendBucketsScopedList(), + }, + next_page_token="ghi", + ), + compute.BackendBucketAggregatedList( + items={ + "h": compute.BackendBucketsScopedList(), + "i": compute.BackendBucketsScopedList(), + }, + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + compute.BackendBucketAggregatedList.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1"} + + pager = client.aggregated_list(request=sample_request) + + assert isinstance(pager.get("a"), compute.BackendBucketsScopedList) + assert pager.get("h") is None + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, tuple) for i in results) + for result in results: + assert isinstance(result, tuple) + assert tuple(type(t) for t in result) == ( + str, + compute.BackendBucketsScopedList, + ) + + assert pager.get("a") is None + assert isinstance(pager.get("h"), compute.BackendBucketsScopedList) + + pages = list(client.aggregated_list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_delete_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached 
wrapped rpcs, # instead of constructing them on each call @@ -3149,50 +3427,308 @@ def test_insert_unary_rest_flattened(): bucket_name="bucket_name_value" ), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/backendBuckets" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertBackendBucketRequest(), + project="project_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields(request_type=compute.ListBackendBucketsRequest): + transport_class = transports.BackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucketList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set(("project",)) + ) + + +def test_list_rest_flattened(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucketList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/backendBuckets" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListBackendBucketsRequest(), + project="project_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + compute.BackendBucket(), + compute.BackendBucket(), + ], + next_page_token="abc", + ), + compute.BackendBucketList( + items=[], + next_page_token="def", + ), + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + ], + next_page_token="ghi", + ), + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + compute.BackendBucket(), + ], + ), + ) + # Two responses for two calls + response = response + response - client.insert_unary(**mock_args) + # Wrap the values into proper Response objs + response = tuple(compute.BackendBucketList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/compute/v1/projects/{project}/global/backendBuckets" - % client.transport._host, - args[1], - ) + sample_request = {"project": "sample1"} + pager = client.list(request=sample_request) -def test_insert_unary_rest_flattened_error(transport: str = "rest"): - client = BackendBucketsClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.BackendBucket) for i in results) - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.insert_unary( - compute.InsertBackendBucketRequest(), - project="project_value", - backend_bucket_resource=compute.BackendBucket( - bucket_name="bucket_name_value" - ), - ) + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_rest_use_cached_wrapped_rpc(): +def test_list_usable_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -3206,29 +3742,31 @@ def test_list_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list in client._transport._wrapped_methods + assert client._transport.list_usable in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list] = mock_rpc + client._transport._wrapped_methods[client._transport.list_usable] = mock_rpc request = {} - client.list(request) + client.list_usable(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list(request) + client.list_usable(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_rest_required_fields(request_type=compute.ListBackendBucketsRequest): +def test_list_usable_rest_required_fields( + request_type=compute.ListUsableBackendBucketsRequest, +): transport_class = transports.BackendBucketsRestTransport request_init = {} @@ -3243,7 +3781,7 @@ def test_list_rest_required_fields(request_type=compute.ListBackendBucketsReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).list_usable._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -3252,7 +3790,7 @@ def test_list_rest_required_fields(request_type=compute.ListBackendBucketsReques unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list._get_unset_required_fields(jsonified_request) + ).list_usable._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -3276,7 +3814,7 @@ def test_list_rest_required_fields(request_type=compute.ListBackendBucketsReques request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = compute.BackendBucketList() + return_value = compute.BackendBucketListUsable() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -3297,26 +3835,26 @@ def test_list_rest_required_fields(request_type=compute.ListBackendBucketsReques response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.BackendBucketList.pb(return_value) + return_value = compute.BackendBucketListUsable.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list(request) + response = client.list_usable(request) expected_params = [] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_rest_unset_required_fields(): +def test_list_usable_rest_unset_required_fields(): transport = transports.BackendBucketsRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list._get_unset_required_fields({}) + unset_fields = transport.list_usable._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -3331,7 +3869,7 @@ def test_list_rest_unset_required_fields(): ) -def test_list_rest_flattened(): +def test_list_usable_rest_flattened(): client = BackendBucketsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -3340,7 +3878,7 @@ def test_list_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.BackendBucketList() + return_value = compute.BackendBucketListUsable() # get arguments that satisfy an http rule for this method sample_request = {"project": "sample1"} @@ -3355,26 +3893,26 @@ def test_list_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.BackendBucketList.pb(return_value) + return_value = compute.BackendBucketListUsable.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list(**mock_args) + client.list_usable(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/compute/v1/projects/{project}/global/backendBuckets" + "%s/compute/v1/projects/{project}/global/backendBuckets/listUsable" % client.transport._host, args[1], ) -def test_list_rest_flattened_error(transport: str = "rest"): +def test_list_usable_rest_flattened_error(transport: str = "rest"): client = BackendBucketsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3383,13 +3921,13 @@ def test_list_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list( - compute.ListBackendBucketsRequest(), + client.list_usable( + compute.ListUsableBackendBucketsRequest(), project="project_value", ) -def test_list_rest_pager(transport: str = "rest"): +def test_list_usable_rest_pager(transport: str = "rest"): client = BackendBucketsClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -3401,7 +3939,7 @@ def test_list_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - compute.BackendBucketList( + compute.BackendBucketListUsable( items=[ compute.BackendBucket(), compute.BackendBucket(), @@ -3409,17 +3947,17 @@ def test_list_rest_pager(transport: str = "rest"): ], next_page_token="abc", ), - compute.BackendBucketList( + compute.BackendBucketListUsable( items=[], next_page_token="def", ), - compute.BackendBucketList( + compute.BackendBucketListUsable( items=[ compute.BackendBucket(), ], next_page_token="ghi", ), - compute.BackendBucketList( + compute.BackendBucketListUsable( items=[ compute.BackendBucket(), compute.BackendBucket(), @@ -3430,7 +3968,7 @@ def test_list_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple(compute.BackendBucketList.to_json(x) for x in response) + response = tuple(compute.BackendBucketListUsable.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") @@ -3439,13 +3977,13 @@ def test_list_rest_pager(transport: str = "rest"): sample_request = {"project": "sample1"} - pager = client.list(request=sample_request) + pager = client.list_usable(request=sample_request) results = list(pager) assert len(results) == 6 assert all(isinstance(i, compute.BackendBucket) for i in results) - pages = 
list(client.list(request=sample_request).pages) + pages = list(client.list_usable(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token @@ -5270,57 +5808,212 @@ def get_message_fields(field): if isinstance(value, dict): result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["signed_url_key_resource"][field])): + del request_init["signed_url_key_resource"][field][i][subfield] + else: + del request_init["signed_url_key_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.add_signed_url_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_add_signed_url_key_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_add_signed_url_key" + ) as post, + mock.patch.object( + transports.BackendBucketsRestInterceptor, + "post_add_signed_url_key_with_metadata", + ) as post_with_metadata, + mock.patch.object( + 
transports.BackendBucketsRestInterceptor, "pre_add_signed_url_key" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.AddSignedUrlKeyBackendBucketRequest.pb( + compute.AddSignedUrlKeyBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.AddSignedUrlKeyBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.add_signed_url_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_aggregated_list_rest_bad_request( + request_type=compute.AggregatedListBackendBucketsRequest, +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.aggregated_list(request) + - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["signed_url_key_resource"][field])): - del request_init["signed_url_key_resource"][field][i][subfield] - else: - del request_init["signed_url_key_resource"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + compute.AggregatedListBackendBucketsRequest, + dict, + ], +) +def test_aggregated_list_rest_call_success(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = compute.Operation( - client_operation_id="client_operation_id_value", - creation_timestamp="creation_timestamp_value", - description="description_value", - end_time="end_time_value", - http_error_message="http_error_message_value", - http_error_status_code=2374, - id=205, - insert_time="insert_time_value", + return_value = compute.BackendBucketAggregatedList( + id="id_value", kind="kind_value", - name="name_value", - operation_group_id="operation_group_id_value", - operation_type="operation_type_value", - progress=885, - region="region_value", + next_page_token="next_page_token_value", self_link="self_link_value", - start_time="start_time_value", - status=compute.Operation.Status.DONE, - status_message="status_message_value", - target_id=947, - target_link="target_link_value", - user="user_value", - zone="zone_value", ) # Wrap the value into a proper Response obj @@ -5328,41 +6021,23 @@ def get_message_fields(field): response_value.status_code = 200 # Convert return value to protobuf type - return_value = compute.Operation.pb(return_value) + return_value = compute.BackendBucketAggregatedList.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.add_signed_url_key(request) + response = client.aggregated_list(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, extended_operation.ExtendedOperation) - assert response.client_operation_id == "client_operation_id_value" - assert response.creation_timestamp == "creation_timestamp_value" - assert response.description == "description_value" - assert response.end_time == "end_time_value" - assert response.http_error_message == "http_error_message_value" - assert response.http_error_status_code == 2374 - assert response.id == 205 - assert response.insert_time == "insert_time_value" + assert isinstance(response, pagers.AggregatedListPager) + assert response.id == "id_value" assert response.kind == "kind_value" - assert response.name == "name_value" - assert response.operation_group_id == "operation_group_id_value" - assert response.operation_type == "operation_type_value" - assert response.progress == 885 - assert response.region == "region_value" + assert response.next_page_token == "next_page_token_value" assert response.self_link == "self_link_value" - assert response.start_time == "start_time_value" - assert response.status == compute.Operation.Status.DONE - assert response.status_message == "status_message_value" - assert response.target_id == 947 - assert response.target_link == "target_link_value" - assert response.user == "user_value" - assert response.zone == "zone_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_add_signed_url_key_rest_interceptors(null_interceptor): +def test_aggregated_list_rest_interceptors(null_interceptor): transport = transports.BackendBucketsRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None @@ -5375,21 +6050,21 @@ def test_add_signed_url_key_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.BackendBucketsRestInterceptor, "post_add_signed_url_key" + transports.BackendBucketsRestInterceptor, "post_aggregated_list" 
) as post, mock.patch.object( transports.BackendBucketsRestInterceptor, - "post_add_signed_url_key_with_metadata", + "post_aggregated_list_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.BackendBucketsRestInterceptor, "pre_add_signed_url_key" + transports.BackendBucketsRestInterceptor, "pre_aggregated_list" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = compute.AddSignedUrlKeyBackendBucketRequest.pb( - compute.AddSignedUrlKeyBackendBucketRequest() + pb_message = compute.AggregatedListBackendBucketsRequest.pb( + compute.AggregatedListBackendBucketsRequest() ) transcode.return_value = { "method": "post", @@ -5401,19 +6076,24 @@ def test_add_signed_url_key_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = compute.Operation.to_json(compute.Operation()) + return_value = compute.BackendBucketAggregatedList.to_json( + compute.BackendBucketAggregatedList() + ) req.return_value.content = return_value - request = compute.AddSignedUrlKeyBackendBucketRequest() + request = compute.AggregatedListBackendBucketsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = compute.Operation() - post_with_metadata.return_value = compute.Operation(), metadata + post.return_value = compute.BackendBucketAggregatedList() + post_with_metadata.return_value = ( + compute.BackendBucketAggregatedList(), + metadata, + ) - client.add_signed_url_key( + client.aggregated_list( request, metadata=[ ("key", "val"), @@ -5824,6 +6504,7 @@ def test_get_rest_call_success(request_type): kind="kind_value", load_balancing_scheme="load_balancing_scheme_value", name="name_value", + region="region_value", self_link="self_link_value", ) @@ -5852,6 +6533,7 @@ def test_get_rest_call_success(request_type): assert 
response.kind == "kind_value" assert response.load_balancing_scheme == "load_balancing_scheme_value" assert response.name == "name_value" + assert response.region == "region_value" assert response.self_link == "self_link_value" @@ -6130,6 +6812,7 @@ def test_insert_rest_call_success(request_type): "load_balancing_scheme": "load_balancing_scheme_value", "name": "name_value", "params": {"resource_manager_tags": {}}, + "region": "region_value", "self_link": "self_link_value", "used_by": [{"reference": "reference_value"}], } @@ -6468,6 +7151,144 @@ def test_list_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_list_usable_rest_bad_request( + request_type=compute.ListUsableBackendBucketsRequest, +): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_usable(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListUsableBackendBucketsRequest, + dict, + ], +) +def test_list_usable_rest_call_success(request_type): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucketListUsable( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucketListUsable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_usable(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListUsablePager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_usable_rest_interceptors(null_interceptor): + transport = transports.BackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.BackendBucketsRestInterceptor(), + ) + client = BackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_list_usable" + ) as post, + mock.patch.object( + transports.BackendBucketsRestInterceptor, "post_list_usable_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.BackendBucketsRestInterceptor, "pre_list_usable" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.ListUsableBackendBucketsRequest.pb( + compute.ListUsableBackendBucketsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.BackendBucketListUsable.to_json( + compute.BackendBucketListUsable() + ) + req.return_value.content = return_value + + request = compute.ListUsableBackendBucketsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendBucketListUsable() + post_with_metadata.return_value = compute.BackendBucketListUsable(), metadata + + 
client.list_usable( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_patch_rest_bad_request(request_type=compute.PatchBackendBucketRequest): client = BackendBucketsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -6548,6 +7369,7 @@ def test_patch_rest_call_success(request_type): "load_balancing_scheme": "load_balancing_scheme_value", "name": "name_value", "params": {"resource_manager_tags": {}}, + "region": "region_value", "self_link": "self_link_value", "used_by": [{"reference": "reference_value"}], } @@ -7545,6 +8367,7 @@ def test_update_rest_call_success(request_type): "load_balancing_scheme": "load_balancing_scheme_value", "name": "name_value", "params": {"resource_manager_tags": {}}, + "region": "region_value", "self_link": "self_link_value", "used_by": [{"reference": "reference_value"}], } @@ -7780,6 +8603,26 @@ def test_add_signed_url_key_unary_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_aggregated_list_empty_call_rest(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.aggregated_list), "__call__") as call: + client.aggregated_list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.AggregatedListBackendBucketsRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_delete_unary_empty_call_rest(): @@ -7902,6 +8745,26 @@ def test_list_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_usable_empty_call_rest(): + client = BackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_usable), "__call__") as call: + client.list_usable(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.ListUsableBackendBucketsRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_patch_unary_empty_call_rest(): @@ -8029,12 +8892,14 @@ def test_backend_buckets_base_transport(): # raise NotImplementedError. 
methods = ( "add_signed_url_key", + "aggregated_list", "delete", "delete_signed_url_key", "get", "get_iam_policy", "insert", "list", + "list_usable", "patch", "set_edge_security_policy", "set_iam_policy", @@ -8186,6 +9051,9 @@ def test_backend_buckets_client_transport_session_collision(transport_name): session1 = client1.transport.add_signed_url_key._session session2 = client2.transport.add_signed_url_key._session assert session1 != session2 + session1 = client1.transport.aggregated_list._session + session2 = client2.transport.aggregated_list._session + assert session1 != session2 session1 = client1.transport.delete._session session2 = client2.transport.delete._session assert session1 != session2 @@ -8204,6 +9072,9 @@ def test_backend_buckets_client_transport_session_collision(transport_name): session1 = client1.transport.list._session session2 = client2.transport.list._session assert session1 != session2 + session1 = client1.transport.list_usable._session + session2 = client2.transport.list_usable._session + assert session1 != session2 session1 = client1.transport.patch._session session2 = client2.transport.patch._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py index b963be850f90..a5f8bc1a24a5 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_backend_services.py @@ -8016,12 +8016,16 @@ def test_insert_rest_call_success(request_type): "max_connections": 1608, "max_connections_per_endpoint": 2990, "max_connections_per_instance": 2978, + "max_in_flight_requests": 2356, + "max_in_flight_requests_per_endpoint": 3738, + "max_in_flight_requests_per_instance": 3726, "max_rate": 849, "max_rate_per_endpoint": 0.22310000000000002, "max_rate_per_instance": 0.22190000000000001, "max_utilization": 
0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "traffic_duration": "traffic_duration_value", } ], "cdn_policy": { @@ -8726,12 +8730,16 @@ def test_patch_rest_call_success(request_type): "max_connections": 1608, "max_connections_per_endpoint": 2990, "max_connections_per_instance": 2978, + "max_in_flight_requests": 2356, + "max_in_flight_requests_per_endpoint": 3738, + "max_in_flight_requests_per_instance": 3726, "max_rate": 849, "max_rate_per_endpoint": 0.22310000000000002, "max_rate_per_instance": 0.22190000000000001, "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "traffic_duration": "traffic_duration_value", } ], "cdn_policy": { @@ -10130,12 +10138,16 @@ def test_update_rest_call_success(request_type): "max_connections": 1608, "max_connections_per_endpoint": 2990, "max_connections_per_instance": 2978, + "max_in_flight_requests": 2356, + "max_in_flight_requests_per_endpoint": 3738, + "max_in_flight_requests_per_instance": 3726, "max_rate": 849, "max_rate_per_endpoint": 0.22310000000000002, "max_rate_per_instance": 0.22190000000000001, "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "traffic_duration": "traffic_duration_value", } ], "cdn_policy": { diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py index 49a4275658b2..1f2268a8ea8a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_disks.py @@ -1963,7 +1963,9 @@ def test_bulk_insert_rest_flattened(): project="project_value", zone="zone_value", bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource( - source_consistency_group_policy="source_consistency_group_policy_value" + 
instant_snapshot_group_parameters=compute.InstantSnapshotGroupParameters( + source_instant_snapshot_group="source_instant_snapshot_group_value" + ) ), ) mock_args.update(sample_request) @@ -2005,7 +2007,9 @@ def test_bulk_insert_rest_flattened_error(transport: str = "rest"): project="project_value", zone="zone_value", bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource( - source_consistency_group_policy="source_consistency_group_policy_value" + instant_snapshot_group_parameters=compute.InstantSnapshotGroupParameters( + source_instant_snapshot_group="source_instant_snapshot_group_value" + ) ), ) @@ -2169,7 +2173,9 @@ def test_bulk_insert_unary_rest_flattened(): project="project_value", zone="zone_value", bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource( - source_consistency_group_policy="source_consistency_group_policy_value" + instant_snapshot_group_parameters=compute.InstantSnapshotGroupParameters( + source_instant_snapshot_group="source_instant_snapshot_group_value" + ) ), ) mock_args.update(sample_request) @@ -2211,7 +2217,9 @@ def test_bulk_insert_unary_rest_flattened_error(transport: str = "rest"): project="project_value", zone="zone_value", bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource( - source_consistency_group_policy="source_consistency_group_policy_value" + instant_snapshot_group_parameters=compute.InstantSnapshotGroupParameters( + source_instant_snapshot_group="source_instant_snapshot_group_value" + ) ), ) @@ -8025,187 +8033,613 @@ def test_update_unary_rest_flattened_error(transport: str = "rest"): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. 
- transport = transports.DisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_update_kms_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = DisksClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. - transport = transports.DisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DisksClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a transport instance. - transport = transports.DisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DisksClient( - client_options=options, - transport=transport, - ) + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = DisksClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc - # It is an error to provide scopes and a transport instance. 
- transport = transports.DisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = DisksClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + request = {} + client.update_kms_key(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.DisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_rest_required_fields( + request_type=compute.UpdateKmsKeyDiskRequest, +): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - client = DisksClient(transport=transport) - assert client.transport is transport + # verify fields with default values are dropped -@pytest.mark.parametrize( - "transport_class", - [ - transports.DisksRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_transport_kind_rest(): - transport = DisksClient.get_transport_class("rest")( + jsonified_request["disk"] = "disk_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == "disk_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" -def test_add_resource_policies_rest_bad_request( - request_type=compute.AddResourcePoliciesDiskRequest, -): client = DisksClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. 
- with ( - mock.patch.object(Session, "request") as req, - pytest.raises(core_exceptions.BadRequest), - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.add_resource_policies(request) + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -@pytest.mark.parametrize( - "request_type", - [ - compute.AddResourcePoliciesDiskRequest, - dict, - ], -) -def test_add_resource_policies_rest_call_success(request_type): - client = DisksClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} - request_init["disks_add_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Determine if the message type is proto-plus or protobuf - test_field = compute.AddResourcePoliciesDiskRequest.meta.fields[ - "disks_add_resource_policies_request_resource" - ] + response = client.update_kms_key(request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields +def test_update_kms_key_rest_unset_required_fields(): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "disk", + "diskUpdateKmsKeyRequestResource", + "project", + "zone", + ) + ) + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "disks_add_resource_policies_request_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_update_kms_key_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - 
"subfield": subfield, - "is_repeated": is_repeated, + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + disk="disk_value", + disk_update_kms_key_request_resource=compute.DiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key( + compute.UpdateKmsKeyDiskRequest(), + project="project_value", + zone="zone_value", + disk="disk_value", + disk_update_kms_key_request_resource=compute.DiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_update_kms_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_unary_rest_required_fields( + request_type=compute.UpdateKmsKeyDiskRequest, +): + transport_class = transports.DisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = "disk_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == "disk_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_unary_rest_unset_required_fields(): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "disk", + "diskUpdateKmsKeyRequestResource", + "project", + "zone", + ) + ) + ) + + +def test_update_kms_key_unary_rest_flattened(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + disk="disk_value", + disk_update_kms_key_request_resource=compute.DiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/disks/{disk}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_unary_rest_flattened_error(transport: str = "rest"): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_kms_key_unary( + compute.UpdateKmsKeyDiskRequest(), + project="project_value", + zone="zone_value", + disk="disk_value", + disk_update_kms_key_request_resource=compute.DiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DisksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DisksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DisksClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DisksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DisksClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DisksRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = DisksClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_add_resource_policies_rest_bad_request( + request_type=compute.AddResourcePoliciesDiskRequest, +): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.add_resource_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.AddResourcePoliciesDiskRequest, + dict, + ], +) +def test_add_resource_policies_rest_call_success(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init["disks_add_resource_policies_request_resource"] = { + "resource_policies": ["resource_policies_value1", "resource_policies_value2"] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddResourcePoliciesDiskRequest.meta.fields[ + "disks_add_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disks_add_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, } ) @@ -8537,7 +8971,15 @@ 
def test_bulk_insert_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} request_init["bulk_insert_disk_resource_resource"] = { - "source_consistency_group_policy": "source_consistency_group_policy_value" + "instant_snapshot_group_parameters": { + "source_instant_snapshot_group": "source_instant_snapshot_group_value" + }, + "snapshot_group_parameters": { + "replica_zones": ["replica_zones_value1", "replica_zones_value2"], + "source_snapshot_group": "source_snapshot_group_value", + "type_": "type__value", + }, + "source_consistency_group_policy": "source_consistency_group_policy_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -9049,6 +9491,7 @@ def test_create_snapshot_rest_call_success(request_type): "location_hint": "location_hint_value", "name": "name_value", "params": {"resource_manager_tags": {}}, + "region": "region_value", "satisfies_pzi": True, "satisfies_pzs": True, "self_link": "self_link_value", @@ -9059,6 +9502,8 @@ def test_create_snapshot_rest_call_success(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_group_id": "snapshot_group_id_value", + "snapshot_group_name": "snapshot_group_name_value", "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, @@ -12350,6 +12795,252 @@ def test_update_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_update_kms_key_rest_bad_request(request_type=compute.UpdateKmsKeyDiskRequest): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request = request_type(**request_init) + 
+ # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_kms_key(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateKmsKeyDiskRequest, + dict, + ], +) +def test_update_kms_key_rest_call_success(request_type): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} + request_init["disk_update_kms_key_request_resource"] = { + "kms_key_name": "kms_key_name_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateKmsKeyDiskRequest.meta.fields[ + "disk_update_kms_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "disk_update_kms_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["disk_update_kms_key_request_resource"][field]) + ): + del 
request_init["disk_update_kms_key_request_resource"][field][i][ + subfield + ] + else: + del request_init["disk_update_kms_key_request_resource"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_kms_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_kms_key_rest_interceptors(null_interceptor): + transport = transports.DisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.DisksRestInterceptor(), + ) + client = DisksClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DisksRestInterceptor, "post_update_kms_key" + ) as post, + mock.patch.object( + transports.DisksRestInterceptor, "post_update_kms_key_with_metadata" + ) as post_with_metadata, + mock.patch.object(transports.DisksRestInterceptor, "pre_update_kms_key") as pre, + ): + pre.assert_not_called() + 
post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateKmsKeyDiskRequest.pb( + compute.UpdateKmsKeyDiskRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateKmsKeyDiskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update_kms_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = DisksClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -12749,6 +13440,26 @@ def test_update_unary_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_kms_key_unary_empty_call_rest(): + client = DisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_kms_key), "__call__") as call: + client.update_kms_key_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateKmsKeyDiskRequest() + + assert args[0] == request_msg + + def test_disks_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): @@ -12790,6 +13501,7 @@ def test_disks_base_transport(): "stop_group_async_replication", "test_iam_permissions", "update", + "update_kms_key", ) for method in methods: with pytest.raises(NotImplementedError): @@ -12990,6 +13702,9 @@ def test_disks_client_transport_session_collision(transport_name): session1 = client1.transport.update._session session2 = client2.transport.update._session assert session1 != session2 + session1 = client1.transport.update_kms_key._session + session2 = client2.transport.update_kms_key._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_future_reservations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_future_reservations.py index fb38e9b35fdb..c1ecd76ec37a 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_future_reservations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_future_reservations.py @@ -4339,6 +4339,7 @@ def test_get_rest_call_success(request_type): return_value = compute.FutureReservation( auto_created_reservations_delete_time="auto_created_reservations_delete_time_value", auto_delete_auto_created_reservations=True, + confidential_compute_type="confidential_compute_type_value", creation_timestamp="creation_timestamp_value", deployment_type="deployment_type_value", description="description_value", @@ -4376,6 +4377,7 @@ def test_get_rest_call_success(request_type): == "auto_created_reservations_delete_time_value" ) assert response.auto_delete_auto_created_reservations is True + assert response.confidential_compute_type == 
"confidential_compute_type_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.deployment_type == "deployment_type_value" assert response.description == "description_value" @@ -4518,6 +4520,7 @@ def test_insert_rest_call_success(request_type): "commitment_plan": "commitment_plan_value", "previous_commitment_terms": "previous_commitment_terms_value", }, + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "deployment_type": "deployment_type_value", "description": "description_value", @@ -4526,6 +4529,7 @@ def test_insert_rest_call_success(request_type): "kind": "kind_value", "name": "name_value", "name_prefix": "name_prefix_value", + "params": {"resource_manager_tags": {}}, "planning_status": "planning_status_value", "reservation_mode": "reservation_mode_value", "reservation_name": "reservation_name_value", @@ -5002,6 +5006,7 @@ def test_update_rest_call_success(request_type): "commitment_plan": "commitment_plan_value", "previous_commitment_terms": "previous_commitment_terms_value", }, + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "deployment_type": "deployment_type_value", "description": "description_value", @@ -5010,6 +5015,7 @@ def test_update_rest_call_success(request_type): "kind": "kind_value", "name": "name_value", "name_prefix": "name_prefix_value", + "params": {"resource_manager_tags": {}}, "planning_status": "planning_status_value", "reservation_mode": "reservation_mode_value", "reservation_name": "reservation_name_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py index 83b1706a529d..9c44545741de 100644 --- 
a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_manager_resize_requests.py @@ -3579,6 +3579,7 @@ def test_get_rest_call_success(request_type): id=205, kind="kind_value", name="name_value", + region="region_value", resize_by=972, self_link="self_link_value", self_link_with_id="self_link_with_id_value", @@ -3605,6 +3606,7 @@ def test_get_rest_call_success(request_type): assert response.id == 205 assert response.kind == "kind_value" assert response.name == "name_value" + assert response.region == "region_value" assert response.resize_by == 972 assert response.self_link == "self_link_value" assert response.self_link_with_id == "self_link_with_id_value" @@ -3736,6 +3738,7 @@ def test_insert_rest_call_success(request_type): "id": 205, "kind": "kind_value", "name": "name_value", + "region": "region_value", "requested_run_duration": {"nanos": 543, "seconds": 751}, "resize_by": 972, "self_link": "self_link_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py index 43af0e917d14..fe3428500587 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_group_managers.py @@ -12846,6 +12846,21 @@ def test_insert_rest_call_success(request_type): "in_progress": True, "last_progress_check": {"error": {}, "timestamp": "timestamp_value"}, }, + "current_instance_statuses": { + "deprovisioning": 1520, + "non_existent": 1310, + "pending": 741, + "pending_stop": 1290, + "provisioning": 1319, + "repairing": 961, + "running": 769, + "staging": 749, + "stopped": 767, + "stopping": 884, + "suspended": 971, + "suspending": 1088, + "terminated": 1069, + }, "is_stable": True, "stateful": 
{ "has_stateful_config": True, @@ -13814,6 +13829,21 @@ def test_patch_rest_call_success(request_type): "in_progress": True, "last_progress_check": {"error": {}, "timestamp": "timestamp_value"}, }, + "current_instance_statuses": { + "deprovisioning": 1520, + "non_existent": 1310, + "pending": 741, + "pending_stop": 1290, + "provisioning": 1319, + "repairing": 961, + "running": 769, + "staging": 749, + "stopped": 767, + "stopping": 884, + "suspended": 971, + "suspending": 1088, + "terminated": 1069, + }, "is_stable": True, "stateful": { "has_stateful_config": True, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py index fd1d0dd27dbd..1f3c1f330e3b 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -4139,6 +4139,7 @@ def test_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py index 255a71a8a812..1ea314a42073 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instances.py @@ -21496,6 +21496,7 @@ def test_add_network_interface_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, @@ -22558,6 +22559,7 @@ def 
test_bulk_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, @@ -24651,6 +24653,7 @@ def test_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, @@ -31013,6 +31016,7 @@ def test_update_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, @@ -31883,6 +31887,7 @@ def test_update_network_interface_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshot_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshot_groups.py new file mode 100644 index 000000000000..fec94ddfeea4 --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshot_groups.py @@ -0,0 +1,5101 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.api_core.extended_operation as extended_operation # type: ignore +import google.auth +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account + +from google.cloud.compute_v1.services.instant_snapshot_groups import ( + InstantSnapshotGroupsClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + +CRED_INFO_JSON = { + "credential_source": 
"/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert InstantSnapshotGroupsClient._get_default_mtls_endpoint(None) is None + assert ( + InstantSnapshotGroupsClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + InstantSnapshotGroupsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + InstantSnapshotGroupsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + InstantSnapshotGroupsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + InstantSnapshotGroupsClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + InstantSnapshotGroupsClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert InstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert InstantSnapshotGroupsClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert InstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as 
excinfo: + InstantSnapshotGroupsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert InstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert InstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert InstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert InstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + InstantSnapshotGroupsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert InstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. 
+ # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + InstantSnapshotGroupsClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert InstantSnapshotGroupsClient._use_client_cert_effective() is False + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert InstantSnapshotGroupsClient._get_client_cert_source(None, False) is None + assert ( + InstantSnapshotGroupsClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + InstantSnapshotGroupsClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + InstantSnapshotGroupsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + InstantSnapshotGroupsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + InstantSnapshotGroupsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstantSnapshotGroupsClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = InstantSnapshotGroupsClient._DEFAULT_UNIVERSE + default_endpoint = InstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = InstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + InstantSnapshotGroupsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert 
( + InstantSnapshotGroupsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == InstantSnapshotGroupsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InstantSnapshotGroupsClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + InstantSnapshotGroupsClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == InstantSnapshotGroupsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InstantSnapshotGroupsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == InstantSnapshotGroupsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + InstantSnapshotGroupsClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + InstantSnapshotGroupsClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + InstantSnapshotGroupsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + InstantSnapshotGroupsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + InstantSnapshotGroupsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + InstantSnapshotGroupsClient._get_universe_domain(None, None) + == InstantSnapshotGroupsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + InstantSnapshotGroupsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = InstantSnapshotGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = InstantSnapshotGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (InstantSnapshotGroupsClient, "rest"), + ], +) +def test_instant_snapshot_groups_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if 
transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.InstantSnapshotGroupsRestTransport, "rest"), + ], +) +def test_instant_snapshot_groups_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (InstantSnapshotGroupsClient, "rest"), + ], +) +def test_instant_snapshot_groups_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_instant_snapshot_groups_client_get_transport_class(): + transport = InstantSnapshotGroupsClient.get_transport_class() + 
available_transports = [ + transports.InstantSnapshotGroupsRestTransport, + ] + assert transport in available_transports + + transport = InstantSnapshotGroupsClient.get_transport_class("rest") + assert transport == transports.InstantSnapshotGroupsRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + InstantSnapshotGroupsClient, + transports.InstantSnapshotGroupsRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + InstantSnapshotGroupsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstantSnapshotGroupsClient), +) +def test_instant_snapshot_groups_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(InstantSnapshotGroupsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(InstantSnapshotGroupsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + InstantSnapshotGroupsClient, + transports.InstantSnapshotGroupsRestTransport, + "rest", + "true", + ), + ( + InstantSnapshotGroupsClient, + transports.InstantSnapshotGroupsRestTransport, + "rest", + "false", + ), + ], 
+) +@mock.patch.object( + InstantSnapshotGroupsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstantSnapshotGroupsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_instant_snapshot_groups_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [InstantSnapshotGroupsClient]) +@mock.patch.object( + InstantSnapshotGroupsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(InstantSnapshotGroupsClient), +) +def test_instant_snapshot_groups_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize("client_class", [InstantSnapshotGroupsClient]) +@mock.patch.object( + InstantSnapshotGroupsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(InstantSnapshotGroupsClient), +) +def test_instant_snapshot_groups_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = InstantSnapshotGroupsClient._DEFAULT_UNIVERSE + default_endpoint = InstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = InstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + InstantSnapshotGroupsClient, + transports.InstantSnapshotGroupsRestTransport, + "rest", + ), + ], +) +def test_instant_snapshot_groups_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + InstantSnapshotGroupsClient, + transports.InstantSnapshotGroupsRestTransport, + "rest", + None, + ), + ], +) +def test_instant_snapshot_groups_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rest_required_fields( + request_type=compute.DeleteInstantSnapshotGroupRequest, +): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["instant_snapshot_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instantSnapshotGroup"] = "instant_snapshot_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instantSnapshotGroup" in jsonified_request + assert jsonified_request["instantSnapshotGroup"] == "instant_snapshot_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instantSnapshotGroup", + "project", + "zone", + ) + ) + ) + + +def test_delete_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instant_snapshot_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{instant_snapshot_group}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteInstantSnapshotGroupRequest(), + project="project_value", + zone="zone_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + + +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteInstantSnapshotGroupRequest, +): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["instant_snapshot_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instantSnapshotGroup"] = "instant_snapshot_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instantSnapshotGroup" in jsonified_request + assert jsonified_request["instantSnapshotGroup"] == "instant_snapshot_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instantSnapshotGroup", + "project", + "zone", + ) + ) + ) + + +def test_delete_unary_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instant_snapshot_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{instant_snapshot_group}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteInstantSnapshotGroupRequest(), + project="project_value", + zone="zone_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields(request_type=compute.GetInstantSnapshotGroupRequest): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["instant_snapshot_group"] = "" + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instantSnapshotGroup"] = "instant_snapshot_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instantSnapshotGroup" in jsonified_request + assert jsonified_request["instantSnapshotGroup"] == "instant_snapshot_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstantSnapshotGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstantSnapshotGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instantSnapshotGroup", + "project", + "zone", + ) + ) + ) + + +def test_get_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstantSnapshotGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "instant_snapshot_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstantSnapshotGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{instant_snapshot_group}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetInstantSnapshotGroupRequest(), + project="project_value", + zone="zone_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=compute.GetIamPolicyInstantSnapshotGroupRequest, +): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("optionsRequestedPolicyVersion",)) + & set( + ( + "project", + "resource", + "zone", + ) + ) + ) + + +def test_get_iam_policy_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyInstantSnapshotGroupRequest(), + project="project_value", + zone="zone_value", + resource="resource_value", + ) + + +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_rest_required_fields( + request_type=compute.InsertInstantSnapshotGroupRequest, +): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "source_consistency_group", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "sourceConsistencyGroup", + ) + ) + & set( + ( + "instantSnapshotGroupResource", + "project", + "zone", + ) + ) + ) + + +def test_insert_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instant_snapshot_group_resource=compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertInstantSnapshotGroupRequest(), + project="project_value", + zone="zone_value", + instant_snapshot_group_resource=compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertInstantSnapshotGroupRequest, +): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "source_consistency_group", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "sourceConsistencyGroup", + ) + ) + & set( + ( + "instantSnapshotGroupResource", + "project", + "zone", + ) + ) + ) + + +def test_insert_unary_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + instant_snapshot_group_resource=compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertInstantSnapshotGroupRequest(), + project="project_value", + zone="zone_value", + instant_snapshot_group_resource=compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields( + request_type=compute.ListInstantSnapshotGroupsRequest, +): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.ListInstantSnapshotGroups() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.ListInstantSnapshotGroups.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "zone", + ) + ) + ) + + +def test_list_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.ListInstantSnapshotGroups() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.ListInstantSnapshotGroups.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListInstantSnapshotGroupsRequest(), + project="project_value", + zone="zone_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ListInstantSnapshotGroups( + items=[ + compute.InstantSnapshotGroup(), + compute.InstantSnapshotGroup(), + compute.InstantSnapshotGroup(), + ], + next_page_token="abc", + ), + compute.ListInstantSnapshotGroups( + items=[], + next_page_token="def", + ), + compute.ListInstantSnapshotGroups( + items=[ + compute.InstantSnapshotGroup(), + ], + next_page_token="ghi", + ), + compute.ListInstantSnapshotGroups( + items=[ + compute.InstantSnapshotGroup(), + compute.InstantSnapshotGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ListInstantSnapshotGroups.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "zone": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstantSnapshotGroup) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap 
all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=compute.SetIamPolicyInstantSnapshotGroupRequest, +): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + 
assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "resource", + "zone", + "zoneSetPolicyRequestResource", + ) + ) + ) + + +def test_set_iam_policy_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + resource="resource_value", + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyInstantSnapshotGroupRequest(), + project="project_value", + zone="zone_value", + resource="resource_value", + zone_set_policy_request_resource=compute.ZoneSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.test_iam_permissions] = ( + mock_rpc + ) + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields( + request_type=compute.TestIamPermissionsInstantSnapshotGroupRequest, +): + transport_class = transports.InstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["resource"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["resource"] = "resource_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "resource", + "testPermissionsRequestResource", + "zone", + ) + ) + ) + + +def test_test_iam_permissions_rest_flattened(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/instantSnapshotGroups/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsInstantSnapshotGroupRequest(), + project="project_value", + zone="zone_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstantSnapshotGroupsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstantSnapshotGroupsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = InstantSnapshotGroupsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = InstantSnapshotGroupsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = InstantSnapshotGroupsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.InstantSnapshotGroupsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = InstantSnapshotGroupsClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_delete_rest_bad_request( + request_type=compute.DeleteInstantSnapshotGroupRequest, +): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instant_snapshot_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteInstantSnapshotGroupRequest, + dict, + ], +) +def test_delete_rest_call_success(request_type): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instant_snapshot_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstantSnapshotGroupsRestInterceptor(), + ) + client = InstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_delete" + ) as post, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, + mock.patch.object( + 
transports.InstantSnapshotGroupsRestInterceptor, "pre_delete" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.DeleteInstantSnapshotGroupRequest.pb( + compute.DeleteInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.DeleteInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_rest_bad_request(request_type=compute.GetInstantSnapshotGroupRequest): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instant_snapshot_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetInstantSnapshotGroupRequest, + dict, + ], +) +def test_get_rest_call_success(request_type): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "instant_snapshot_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value", + description="description_value", + id=205, + kind="kind_value", + name="name_value", + region="region_value", + self_link="self_link_value", + self_link_with_id="self_link_with_id_value", + source_consistency_group="source_consistency_group_value", + status="status_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstantSnapshotGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.InstantSnapshotGroup) + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.self_link_with_id == "self_link_with_id_value" + assert response.source_consistency_group == "source_consistency_group_value" + assert response.status == "status_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstantSnapshotGroupsRestInterceptor(), + ) + client = InstantSnapshotGroupsClient(transport=transport) + + with ( + 
mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_get" + ) as post, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "pre_get" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetInstantSnapshotGroupRequest.pb( + compute.GetInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.InstantSnapshotGroup.to_json( + compute.InstantSnapshotGroup() + ) + req.return_value.content = return_value + + request = compute.GetInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstantSnapshotGroup() + post_with_metadata.return_value = compute.InstantSnapshotGroup(), metadata + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=compute.GetIamPolicyInstantSnapshotGroupRequest, +): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a 
BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetIamPolicyInstantSnapshotGroupRequest, + dict, + ], +) +def test_get_iam_policy_rest_call_success(request_type): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstantSnapshotGroupsRestInterceptor(), + ) + client = InstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_get_iam_policy" + ) as post, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "pre_get_iam_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetIamPolicyInstantSnapshotGroupRequest.pb( + compute.GetIamPolicyInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Policy.to_json(compute.Policy()) + req.return_value.content = return_value + + request = compute.GetIamPolicyInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_insert_rest_bad_request( + request_type=compute.InsertInstantSnapshotGroupRequest, +): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.insert(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertInstantSnapshotGroupRequest, + dict, + ], +) +def test_insert_rest_call_success(request_type): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["instant_snapshot_group_resource"] = { + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "name": "name_value", + "region": "region_value", + "resource_status": { + "consistency_membership_resolution_time": "consistency_membership_resolution_time_value", + "source_info": { + "consistency_group": "consistency_group_value", + "consistency_group_id": "consistency_group_id_value", + }, + }, + "self_link": "self_link_value", + "self_link_with_id": 
"self_link_with_id_value", + "source_consistency_group": "source_consistency_group_value", + "status": "status_value", + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertInstantSnapshotGroupRequest.meta.fields[ + "instant_snapshot_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instant_snapshot_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, 
"keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instant_snapshot_group_resource"][field]) + ): + del request_init["instant_snapshot_group_resource"][field][i][ + subfield + ] + else: + del request_init["instant_snapshot_group_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstantSnapshotGroupsRestInterceptor(), + ) + client = InstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_insert" + ) as post, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, + mock.patch.object( + 
transports.InstantSnapshotGroupsRestInterceptor, "pre_insert" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.InsertInstantSnapshotGroupRequest.pb( + compute.InsertInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.InsertInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_rest_bad_request(request_type=compute.ListInstantSnapshotGroupsRequest): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListInstantSnapshotGroupsRequest, + dict, + ], +) +def test_list_rest_call_success(request_type): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.ListInstantSnapshotGroups( + etag="etag_value", + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + unreachables=["unreachables_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.ListInstantSnapshotGroups.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.etag == "etag_value" + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + assert response.unreachables == ["unreachables_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstantSnapshotGroupsRestInterceptor(), + ) + client = InstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_list" + ) as post, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "pre_list" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.ListInstantSnapshotGroupsRequest.pb( + compute.ListInstantSnapshotGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.ListInstantSnapshotGroups.to_json( + compute.ListInstantSnapshotGroups() + ) + req.return_value.content = return_value + + request = compute.ListInstantSnapshotGroupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
compute.ListInstantSnapshotGroups() + post_with_metadata.return_value = compute.ListInstantSnapshotGroups(), metadata + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + request_type=compute.SetIamPolicyInstantSnapshotGroupRequest, +): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetIamPolicyInstantSnapshotGroupRequest, + dict, + ], +) +def test_set_iam_policy_rest_call_success(request_type): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["zone_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", 
"members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { + "audit_log_configs": [ + { + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "ignore_child_exemptions": True, + "log_type": "log_type_value", + } + ], + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "service": "service_value", + } + ], + "bindings": {}, + "etag": "etag_value", + "iam_owned": True, + "version": 774, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyInstantSnapshotGroupRequest.meta.fields[ + "zone_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "zone_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["zone_set_policy_request_resource"][field]) + ): + del 
request_init["zone_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["zone_set_policy_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstantSnapshotGroupsRestInterceptor(), + ) + client = InstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_set_iam_policy" + ) as post, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "pre_set_iam_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.SetIamPolicyInstantSnapshotGroupRequest.pb( + compute.SetIamPolicyInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Policy.to_json(compute.Policy()) + req.return_value.content = return_value + + request = compute.SetIamPolicyInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + request_type=compute.TestIamPermissionsInstantSnapshotGroupRequest, +): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.TestIamPermissionsInstantSnapshotGroupRequest, + dict, + ], +) +def test_test_iam_permissions_rest_call_success(request_type): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = { + "permissions": ["permissions_value1", "permissions_value2"] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsInstantSnapshotGroupRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from 
the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.InstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.InstantSnapshotGroupsRestInterceptor(), + ) + client = InstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "post_test_iam_permissions" + ) as post, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.InstantSnapshotGroupsRestInterceptor, "pre_test_iam_permissions" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.TestIamPermissionsInstantSnapshotGroupRequest.pb( + compute.TestIamPermissionsInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.TestPermissionsResponse.to_json( + compute.TestPermissionsResponse() + ) + req.return_value.content = return_value + + request = compute.TestIamPermissionsInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata + + 
client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_client_w_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_unary_empty_call_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete), "__call__") as call: + client.delete_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.DeleteInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_empty_call_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get), "__call__") as call: + client.get(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_iam_policy_empty_call_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetIamPolicyInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_insert_unary_empty_call_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.insert), "__call__") as call: + client.insert_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.InsertInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_empty_call_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list), "__call__") as call: + client.list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.ListInstantSnapshotGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.SetIamPolicyInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.TestIamPermissionsInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +def test_instant_snapshot_groups_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.InstantSnapshotGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_instant_snapshot_groups_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.compute_v1.services.instant_snapshot_groups.transports.InstantSnapshotGroupsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.InstantSnapshotGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "delete", + "get", + "get_iam_policy", + "insert", + "list", + "set_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_instant_snapshot_groups_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.compute_v1.services.instant_snapshot_groups.transports.InstantSnapshotGroupsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstantSnapshotGroupsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_instant_snapshot_groups_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.compute_v1.services.instant_snapshot_groups.transports.InstantSnapshotGroupsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.InstantSnapshotGroupsTransport() + adc.assert_called_once() + + +def test_instant_snapshot_groups_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + InstantSnapshotGroupsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_instant_snapshot_groups_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.InstantSnapshotGroupsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_instant_snapshot_groups_host_no_port(transport_name): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_instant_snapshot_groups_host_with_port(transport_name): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_instant_snapshot_groups_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = InstantSnapshotGroupsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = InstantSnapshotGroupsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = 
InstantSnapshotGroupsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = InstantSnapshotGroupsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = InstantSnapshotGroupsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = InstantSnapshotGroupsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = InstantSnapshotGroupsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = InstantSnapshotGroupsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = InstantSnapshotGroupsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = InstantSnapshotGroupsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = InstantSnapshotGroupsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = InstantSnapshotGroupsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = InstantSnapshotGroupsClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = InstantSnapshotGroupsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = InstantSnapshotGroupsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = InstantSnapshotGroupsClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = InstantSnapshotGroupsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.InstantSnapshotGroupsTransport, "_prep_wrapped_messages" + ) as prep: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.InstantSnapshotGroupsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = InstantSnapshotGroupsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = InstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls 
underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (InstantSnapshotGroupsClient, transports.InstantSnapshotGroupsRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py index 15ef714dd234..d652fe371367 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_instant_snapshots.py @@ -4280,6 +4280,8 @@ def test_get_rest_call_success(request_type): self_link_with_id="self_link_with_id_value", source_disk="source_disk_value", source_disk_id="source_disk_id_value", + source_instant_snapshot_group="source_instant_snapshot_group_value", + source_instant_snapshot_group_id="source_instant_snapshot_group_id_value", status="status_value", zone="zone_value", ) @@ -4313,6 +4315,13 @@ def 
test_get_rest_call_success(request_type): assert response.self_link_with_id == "self_link_with_id_value" assert response.source_disk == "source_disk_value" assert response.source_disk_id == "source_disk_id_value" + assert ( + response.source_instant_snapshot_group == "source_instant_snapshot_group_value" + ) + assert ( + response.source_instant_snapshot_group_id + == "source_instant_snapshot_group_id_value" + ) assert response.status == "status_value" assert response.zone == "zone_value" @@ -4571,6 +4580,8 @@ def test_insert_rest_call_success(request_type): "self_link_with_id": "self_link_with_id_value", "source_disk": "source_disk_value", "source_disk_id": "source_disk_id_value", + "source_instant_snapshot_group": "source_instant_snapshot_group_value", + "source_instant_snapshot_group_id": "source_instant_snapshot_group_id_value", "status": "status_value", "zone": "zone_value", } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py index 89d71e67398c..2a44a664a52d 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_machine_images.py @@ -4113,6 +4113,7 @@ def test_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py index e4d21a40d43c..8889054da836 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_network_attachments.py @@ 
-4602,6 +4602,7 @@ def test_insert_rest_call_success(request_type): "secondary_ip_cidr_ranges_value1", "secondary_ip_cidr_ranges_value2", ], + "service_class_id": "service_class_id_value", "status": "status_value", "subnetwork": "subnetwork_value", "subnetwork_cidr_range": "subnetwork_cidr_range_value", @@ -5025,6 +5026,7 @@ def test_patch_rest_call_success(request_type): "secondary_ip_cidr_ranges_value1", "secondary_ip_cidr_ranges_value2", ], + "service_class_id": "service_class_id_value", "status": "status_value", "subnetwork": "subnetwork_value", "subnetwork_cidr_range": "subnetwork_cidr_range_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_buckets.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_buckets.py new file mode 100644 index 000000000000..47f51456b7c7 --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_buckets.py @@ -0,0 +1,6280 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.api_core.extended_operation as extended_operation # type: ignore +import google.auth +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account + +from google.cloud.compute_v1.services.region_backend_buckets import ( + RegionBackendBucketsClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async 
auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert RegionBackendBucketsClient._get_default_mtls_endpoint(None) is None + assert ( + RegionBackendBucketsClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionBackendBucketsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionBackendBucketsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RegionBackendBucketsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert 
( + RegionBackendBucketsClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + RegionBackendBucketsClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert RegionBackendBucketsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RegionBackendBucketsClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert RegionBackendBucketsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + RegionBackendBucketsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert RegionBackendBucketsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert RegionBackendBucketsClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert RegionBackendBucketsClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert RegionBackendBucketsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionBackendBucketsClient._read_environment_variables() + 
assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert RegionBackendBucketsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert RegionBackendBucketsClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert RegionBackendBucketsClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RegionBackendBucketsClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert RegionBackendBucketsClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert RegionBackendBucketsClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert RegionBackendBucketsClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert RegionBackendBucketsClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert RegionBackendBucketsClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. 
+ # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert RegionBackendBucketsClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + RegionBackendBucketsClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert RegionBackendBucketsClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and both the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` and `GOOGLE_API_CERTIFICATE_CONFIG` + # environment variables are set to the empty string (effectively unset).
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert RegionBackendBucketsClient._use_client_cert_effective() is False + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert RegionBackendBucketsClient._get_client_cert_source(None, False) is None + assert ( + RegionBackendBucketsClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + RegionBackendBucketsClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + RegionBackendBucketsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + RegionBackendBucketsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RegionBackendBucketsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionBackendBucketsClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = RegionBackendBucketsClient._DEFAULT_UNIVERSE + default_endpoint = RegionBackendBucketsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionBackendBucketsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + RegionBackendBucketsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + 
RegionBackendBucketsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RegionBackendBucketsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionBackendBucketsClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + RegionBackendBucketsClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == RegionBackendBucketsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionBackendBucketsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RegionBackendBucketsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionBackendBucketsClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + RegionBackendBucketsClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionBackendBucketsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RegionBackendBucketsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RegionBackendBucketsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + RegionBackendBucketsClient._get_universe_domain(None, None) + == RegionBackendBucketsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RegionBackendBucketsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionBackendBucketsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionBackendBucketsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionBackendBucketsClient, "rest"), + ], +) +def test_region_backend_buckets_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if 
transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RegionBackendBucketsRestTransport, "rest"), + ], +) +def test_region_backend_buckets_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionBackendBucketsClient, "rest"), + ], +) +def test_region_backend_buckets_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_region_backend_buckets_client_get_transport_class(): + transport = RegionBackendBucketsClient.get_transport_class() + 
available_transports = [ + transports.RegionBackendBucketsRestTransport, + ] + assert transport in available_transports + + transport = RegionBackendBucketsClient.get_transport_class("rest") + assert transport == transports.RegionBackendBucketsRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RegionBackendBucketsClient, + transports.RegionBackendBucketsRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + RegionBackendBucketsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionBackendBucketsClient), +) +def test_region_backend_buckets_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionBackendBucketsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionBackendBucketsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + RegionBackendBucketsClient, + transports.RegionBackendBucketsRestTransport, + "rest", + "true", + ), + ( + RegionBackendBucketsClient, + transports.RegionBackendBucketsRestTransport, + "rest", + "false", + ), + ], +) 
+@mock.patch.object( + RegionBackendBucketsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionBackendBucketsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_backend_buckets_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [RegionBackendBucketsClient]) +@mock.patch.object( + RegionBackendBucketsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RegionBackendBucketsClient), +) +def test_region_backend_buckets_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize("client_class", [RegionBackendBucketsClient]) +@mock.patch.object( + RegionBackendBucketsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionBackendBucketsClient), +) +def test_region_backend_buckets_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RegionBackendBucketsClient._DEFAULT_UNIVERSE + default_endpoint = RegionBackendBucketsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionBackendBucketsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RegionBackendBucketsClient, + transports.RegionBackendBucketsRestTransport, + "rest", + ), + ], +) +def test_region_backend_buckets_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RegionBackendBucketsClient, + transports.RegionBackendBucketsRestTransport, + "rest", + None, + ), + ], +) +def test_region_backend_buckets_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rest_required_fields( + request_type=compute.DeleteRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = "backend_bucket_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == "backend_bucket_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendBucket", + "project", + "region", + ) + ) + ) + + +def test_delete_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionBackendBucketRequest(), + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + ) + + +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = "backend_bucket_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == "backend_bucket_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendBucket", + "project", + "region", + ) + ) + ) + + +def test_delete_unary_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionBackendBucketRequest(), + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + ) + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields(request_type=compute.GetRegionBackendBucketRequest): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = "backend_bucket_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == "backend_bucket_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucket() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "backendBucket", + "project", + "region", + ) + ) + ) + + +def test_get_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucket() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionBackendBucketRequest(), + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=compute.GetIamPolicyRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("optionsRequestedPolicyVersion",)) + & set( + ( + "project", + "region", + "resource", + ) + ) + ) + + +def test_get_iam_policy_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyRegionBackendBucketRequest(), + project="project_value", + region="region_value", + resource="resource_value", + ) + + +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_rest_required_fields( + request_type=compute.InsertRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendBucketResource", + "project", + "region", + ) + ) + ) + + +def test_insert_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionBackendBucketRequest(), + project="project_value", + region="region_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + + +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendBucketResource", + "project", + "region", + ) + ) + ) + + +def test_insert_unary_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionBackendBucketRequest(), + project="project_value", + region="region_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields( + request_type=compute.ListRegionBackendBucketsRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucketList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "region", + ) + ) + ) + + +def test_list_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucketList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionBackendBucketsRequest(), + project="project_value", + region="region_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + compute.BackendBucket(), + compute.BackendBucket(), + ], + next_page_token="abc", + ), + compute.BackendBucketList( + items=[], + next_page_token="def", + ), + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + ], + next_page_token="ghi", + ), + compute.BackendBucketList( + items=[ + compute.BackendBucket(), + compute.BackendBucket(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.BackendBucketList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "region": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.BackendBucket) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_list_usable_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + 
+ # Ensure method has been cached + assert client._transport.list_usable in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_usable] = mock_rpc + + request = {} + client.list_usable(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.list_usable(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_usable_rest_required_fields( + request_type=compute.ListUsableRegionBackendBucketsRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_usable._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucketListUsable() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucketListUsable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_usable(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_usable_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_usable._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "region", + ) + ) + ) + + +def test_list_usable_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucketListUsable() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.BackendBucketListUsable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_usable(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/listUsable" + % client.transport._host, + args[1], + ) + + +def test_list_usable_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_usable( + compute.ListUsableRegionBackendBucketsRequest(), + project="project_value", + region="region_value", + ) + + +def test_list_usable_rest_pager(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.BackendBucketListUsable( + items=[ + compute.BackendBucket(), + compute.BackendBucket(), + compute.BackendBucket(), + ], + next_page_token="abc", + ), + compute.BackendBucketListUsable( + items=[], + next_page_token="def", + ), + compute.BackendBucketListUsable( + items=[ + compute.BackendBucket(), + ], + next_page_token="ghi", + ), + compute.BackendBucketListUsable( + items=[ + compute.BackendBucket(), + compute.BackendBucket(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.BackendBucketListUsable.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "region": "sample2"} + + pager = client.list_usable(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.BackendBucket) for i in results) + + pages = list(client.list_usable(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in 
client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_patch_rest_required_fields( + request_type=compute.PatchRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = "backend_bucket_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == "backend_bucket_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendBucket", + "backendBucketResource", + "project", + "region", + ) + ) + ) + + +def test_patch_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionBackendBucketRequest(), + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + + +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["backend_bucket"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["backendBucket"] = "backend_bucket_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "backendBucket" in jsonified_request + assert jsonified_request["backendBucket"] == "backend_bucket_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "backendBucket", + "backendBucketResource", + "project", + "region", + ) + ) + ) + + +def test_patch_unary_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/{backend_bucket}" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionBackendBucketRequest(), + project="project_value", + region="region_value", + backend_bucket="backend_bucket_value", + backend_bucket_resource=compute.BackendBucket( + bucket_name="bucket_name_value" + ), + ) + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=compute.SetIamPolicyRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "regionSetPolicyRequestResource", + "resource", + ) + ) + ) + + +def test_set_iam_policy_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyRegionBackendBucketRequest(), + project="project_value", + region="region_value", + resource="resource_value", + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.test_iam_permissions] = ( + mock_rpc + ) + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields( + request_type=compute.TestIamPermissionsRegionBackendBucketRequest, +): + transport_class = transports.RegionBackendBucketsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "resource", + "testPermissionsRequestResource", + ) + ) + ) + + +def test_test_iam_permissions_rest_flattened(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/backendBuckets/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsRegionBackendBucketRequest(), + project="project_value", + region="region_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionBackendBucketsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionBackendBucketsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionBackendBucketsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionBackendBucketsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionBackendBucketsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionBackendBucketsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = RegionBackendBucketsClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_delete_rest_bad_request(request_type=compute.DeleteRegionBackendBucketRequest): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteRegionBackendBucketRequest, + dict, + ], +) +def test_delete_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_delete" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, 
"pre_delete" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.DeleteRegionBackendBucketRequest.pb( + compute.DeleteRegionBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.DeleteRegionBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_rest_bad_request(request_type=compute.GetRegionBackendBucketRequest): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetRegionBackendBucketRequest, + dict, + ], +) +def test_get_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucket( + bucket_name="bucket_name_value", + compression_mode="compression_mode_value", + creation_timestamp="creation_timestamp_value", + custom_response_headers=["custom_response_headers_value"], + description="description_value", + edge_security_policy="edge_security_policy_value", + enable_cdn=True, + id=205, + kind="kind_value", + load_balancing_scheme="load_balancing_scheme_value", + name="name_value", + region="region_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucket.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.BackendBucket) + assert response.bucket_name == "bucket_name_value" + assert response.compression_mode == "compression_mode_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.custom_response_headers == ["custom_response_headers_value"] + assert response.description == "description_value" + assert response.edge_security_policy == "edge_security_policy_value" + assert response.enable_cdn is True + assert response.id == 205 + assert response.kind == "kind_value" + assert response.load_balancing_scheme == "load_balancing_scheme_value" + assert response.name == "name_value" + assert response.region == "region_value" + assert response.self_link == "self_link_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_get" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "pre_get" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetRegionBackendBucketRequest.pb( + compute.GetRegionBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + 
req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.BackendBucket.to_json(compute.BackendBucket()) + req.return_value.content = return_value + + request = compute.GetRegionBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendBucket() + post_with_metadata.return_value = compute.BackendBucket(), metadata + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=compute.GetIamPolicyRegionBackendBucketRequest, +): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetIamPolicyRegionBackendBucketRequest, + dict, + ], +) +def test_get_iam_policy_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_get_iam_policy" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "pre_get_iam_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetIamPolicyRegionBackendBucketRequest.pb( + compute.GetIamPolicyRegionBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Policy.to_json(compute.Policy()) + req.return_value.content = return_value + + request = compute.GetIamPolicyRegionBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", 
"squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_insert_rest_bad_request(request_type=compute.InsertRegionBackendBucketRequest): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.insert(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertRegionBackendBucketRequest, + dict, + ], +) +def test_insert_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["backend_bucket_resource"] = { + "bucket_name": "bucket_name_value", + "cdn_policy": { + "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], + "cache_key_policy": { + "include_http_headers": [ + "include_http_headers_value1", + "include_http_headers_value2", + ], + "query_string_whitelist": [ + "query_string_whitelist_value1", + "query_string_whitelist_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": 1074, + "default_ttl": 1176, + "max_ttl": 761, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, 
"ttl": 340}], + "request_coalescing": True, + "serve_while_stale": 1813, + "signed_url_cache_max_age_sec": 2890, + "signed_url_key_names": [ + "signed_url_key_names_value1", + "signed_url_key_names_value2", + ], + }, + "compression_mode": "compression_mode_value", + "creation_timestamp": "creation_timestamp_value", + "custom_response_headers": [ + "custom_response_headers_value1", + "custom_response_headers_value2", + ], + "description": "description_value", + "edge_security_policy": "edge_security_policy_value", + "enable_cdn": True, + "id": 205, + "kind": "kind_value", + "load_balancing_scheme": "load_balancing_scheme_value", + "name": "name_value", + "params": {"resource_manager_tags": {}}, + "region": "region_value", + "self_link": "self_link_value", + "used_by": [{"reference": "reference_value"}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionBackendBucketRequest.meta.fields[ + "backend_bucket_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_bucket_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_bucket_resource"][field])): + del 
request_init["backend_bucket_resource"][field][i][subfield] + else: + del request_init["backend_bucket_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_insert" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, 
"pre_insert" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.InsertRegionBackendBucketRequest.pb( + compute.InsertRegionBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.InsertRegionBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_rest_bad_request(request_type=compute.ListRegionBackendBucketsRequest): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListRegionBackendBucketsRequest, + dict, + ], +) +def test_list_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.BackendBucketList( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucketList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_list" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "pre_list" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.ListRegionBackendBucketsRequest.pb( + compute.ListRegionBackendBucketsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.BackendBucketList.to_json(compute.BackendBucketList()) + req.return_value.content = return_value + + request = compute.ListRegionBackendBucketsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendBucketList() + post_with_metadata.return_value = compute.BackendBucketList(), metadata + + client.list( + request, + 
metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_usable_rest_bad_request( + request_type=compute.ListUsableRegionBackendBucketsRequest, +): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_usable(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListUsableRegionBackendBucketsRequest, + dict, + ], +) +def test_list_usable_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.BackendBucketListUsable( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.BackendBucketListUsable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_usable(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListUsablePager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_usable_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_list_usable" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, + "post_list_usable_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "pre_list_usable" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = 
compute.ListUsableRegionBackendBucketsRequest.pb( + compute.ListUsableRegionBackendBucketsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.BackendBucketListUsable.to_json( + compute.BackendBucketListUsable() + ) + req.return_value.content = return_value + + request = compute.ListUsableRegionBackendBucketsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.BackendBucketListUsable() + post_with_metadata.return_value = compute.BackendBucketListUsable(), metadata + + client.list_usable( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_patch_rest_bad_request(request_type=compute.PatchRegionBackendBucketRequest): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.patch(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchRegionBackendBucketRequest, + dict, + ], +) +def test_patch_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "backend_bucket": "sample3", + } + request_init["backend_bucket_resource"] = { + "bucket_name": "bucket_name_value", + "cdn_policy": { + "bypass_cache_on_request_headers": [{"header_name": "header_name_value"}], + "cache_key_policy": { + "include_http_headers": [ + "include_http_headers_value1", + "include_http_headers_value2", + ], + "query_string_whitelist": [ + "query_string_whitelist_value1", + "query_string_whitelist_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": 1074, + "default_ttl": 1176, + "max_ttl": 761, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": 340}], + "request_coalescing": True, + "serve_while_stale": 1813, + "signed_url_cache_max_age_sec": 2890, + "signed_url_key_names": [ + "signed_url_key_names_value1", + "signed_url_key_names_value2", + ], + }, + "compression_mode": "compression_mode_value", + "creation_timestamp": "creation_timestamp_value", + "custom_response_headers": [ + "custom_response_headers_value1", + "custom_response_headers_value2", + ], + "description": "description_value", + "edge_security_policy": "edge_security_policy_value", + 
"enable_cdn": True, + "id": 205, + "kind": "kind_value", + "load_balancing_scheme": "load_balancing_scheme_value", + "name": "name_value", + "params": {"resource_manager_tags": {}}, + "region": "region_value", + "self_link": "self_link_value", + "used_by": [{"reference": "reference_value"}], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionBackendBucketRequest.meta.fields[ + "backend_bucket_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "backend_bucket_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = 
value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["backend_bucket_resource"][field])): + del request_init["backend_bucket_resource"][field][i][subfield] + else: + del request_init["backend_bucket_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.patch(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_patch" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, 
"pre_patch" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.PatchRegionBackendBucketRequest.pb( + compute.PatchRegionBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.PatchRegionBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + request_type=compute.SetIamPolicyRegionBackendBucketRequest, +): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetIamPolicyRegionBackendBucketRequest, + dict, + ], +) +def test_set_iam_policy_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", "members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { + "audit_log_configs": [ + { + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "ignore_child_exemptions": True, + "log_type": "log_type_value", + } + ], + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "service": "service_value", + } + ], + "bindings": {}, + "etag": "etag_value", + "iam_owned": True, + "version": 774, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyRegionBackendBucketRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the 
sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_set_iam_policy" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "pre_set_iam_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.SetIamPolicyRegionBackendBucketRequest.pb( + compute.SetIamPolicyRegionBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Policy.to_json(compute.Policy()) + req.return_value.content = return_value + + request = compute.SetIamPolicyRegionBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", 
"squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + request_type=compute.TestIamPermissionsRegionBackendBucketRequest, +): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.TestIamPermissionsRegionBackendBucketRequest, + dict, + ], +) +def test_test_iam_permissions_rest_call_success(request_type): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = { + "permissions": ["permissions_value1", "permissions_value2"] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsRegionBackendBucketRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from 
the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.RegionBackendBucketsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionBackendBucketsRestInterceptor(), + ) + client = RegionBackendBucketsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "post_test_iam_permissions" + ) as post, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionBackendBucketsRestInterceptor, "pre_test_iam_permissions" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.TestIamPermissionsRegionBackendBucketRequest.pb( + compute.TestIamPermissionsRegionBackendBucketRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.TestPermissionsResponse.to_json( + compute.TestPermissionsResponse() + ) + req.return_value.content = return_value + + request = compute.TestIamPermissionsRegionBackendBucketRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata + + 
client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_client_w_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_unary_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete), "__call__") as call: + client.delete_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.DeleteRegionBackendBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get), "__call__") as call: + client.get(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetRegionBackendBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_iam_policy_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetIamPolicyRegionBackendBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_insert_unary_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.insert), "__call__") as call: + client.insert_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.InsertRegionBackendBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list), "__call__") as call: + client.list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.ListRegionBackendBucketsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_usable_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_usable), "__call__") as call: + client.list_usable(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.ListUsableRegionBackendBucketsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_patch_unary_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.patch), "__call__") as call: + client.patch_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.PatchRegionBackendBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.SetIamPolicyRegionBackendBucketRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.TestIamPermissionsRegionBackendBucketRequest() + + assert args[0] == request_msg + + +def test_region_backend_buckets_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionBackendBucketsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_region_backend_buckets_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.region_backend_buckets.transports.RegionBackendBucketsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RegionBackendBucketsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "delete", + "get", + "get_iam_policy", + "insert", + "list", + "list_usable", + "patch", + "set_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_backend_buckets_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.compute_v1.services.region_backend_buckets.transports.RegionBackendBucketsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionBackendBucketsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_region_backend_buckets_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.compute_v1.services.region_backend_buckets.transports.RegionBackendBucketsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionBackendBucketsTransport() + adc.assert_called_once() + + +def test_region_backend_buckets_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionBackendBucketsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_region_backend_buckets_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RegionBackendBucketsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_backend_buckets_host_no_port(transport_name): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_region_backend_buckets_host_with_port(transport_name): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_backend_buckets_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionBackendBucketsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionBackendBucketsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.list_usable._session + session2 = client2.transport.list_usable._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = 
client2.transport.test_iam_permissions._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RegionBackendBucketsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionBackendBucketsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionBackendBucketsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RegionBackendBucketsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionBackendBucketsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionBackendBucketsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RegionBackendBucketsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionBackendBucketsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionBackendBucketsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = RegionBackendBucketsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionBackendBucketsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionBackendBucketsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RegionBackendBucketsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionBackendBucketsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionBackendBucketsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RegionBackendBucketsTransport, "_prep_wrapped_messages" + ) as prep: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RegionBackendBucketsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RegionBackendBucketsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionBackendBucketsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (RegionBackendBucketsClient, transports.RegionBackendBucketsRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py index a9442219af34..9c61bf8a7164 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_backend_services.py @@ -5787,12 +5787,16 @@ def test_insert_rest_call_success(request_type): "max_connections": 1608, "max_connections_per_endpoint": 2990, "max_connections_per_instance": 2978, + "max_in_flight_requests": 2356, + "max_in_flight_requests_per_endpoint": 3738, + "max_in_flight_requests_per_instance": 3726, "max_rate": 849, "max_rate_per_endpoint": 0.22310000000000002, "max_rate_per_instance": 0.22190000000000001, "max_utilization": 
0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "traffic_duration": "traffic_duration_value", } ], "cdn_policy": { @@ -6508,12 +6512,16 @@ def test_patch_rest_call_success(request_type): "max_connections": 1608, "max_connections_per_endpoint": 2990, "max_connections_per_instance": 2978, + "max_in_flight_requests": 2356, + "max_in_flight_requests_per_endpoint": 3738, + "max_in_flight_requests_per_instance": 3726, "max_rate": 849, "max_rate_per_endpoint": 0.22310000000000002, "max_rate_per_instance": 0.22190000000000001, "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "traffic_duration": "traffic_duration_value", } ], "cdn_policy": { @@ -7679,12 +7687,16 @@ def test_update_rest_call_success(request_type): "max_connections": 1608, "max_connections_per_endpoint": 2990, "max_connections_per_instance": 2978, + "max_in_flight_requests": 2356, + "max_in_flight_requests_per_endpoint": 3738, + "max_in_flight_requests_per_instance": 3726, "max_rate": 849, "max_rate_per_endpoint": 0.22310000000000002, "max_rate_per_instance": 0.22190000000000001, "max_utilization": 0.1633, "orchestration_info": {"resource_uri": "resource_uri_value"}, "preference": "preference_value", + "traffic_duration": "traffic_duration_value", } ], "cdn_policy": { diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py index 6003c06a7dd0..a8c521836f70 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_commitments.py @@ -3226,6 +3226,7 @@ def test_insert_rest_call_success(request_type): "merge_source_commitments_value2", ], "name": "name_value", + "params": {"resource_manager_tags": {}}, "plan": "plan_value", "region": 
"region_value", "reservations": [ @@ -3247,6 +3248,7 @@ def test_insert_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, "delete_at_time": "delete_at_time_value", @@ -3743,6 +3745,7 @@ def test_update_rest_call_success(request_type): "merge_source_commitments_value2", ], "name": "name_value", + "params": {"resource_manager_tags": {}}, "plan": "plan_value", "region": "region_value", "reservations": [ @@ -3764,6 +3767,7 @@ def test_update_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, "delete_at_time": "delete_at_time_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_composite_health_checks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_composite_health_checks.py index 33687fe933f3..f8e33eff0fab 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_composite_health_checks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_composite_health_checks.py @@ -2132,6 +2132,209 @@ def test_get_rest_flattened_error(transport: str = "rest"): ) +def test_get_health_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 
0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_health in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_health] = mock_rpc + + request = {} + client.get_health(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_health(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_health_rest_required_fields( + request_type=compute.GetHealthRegionCompositeHealthCheckRequest, +): + transport_class = transports.RegionCompositeHealthChecksRestTransport + + request_init = {} + request_init["composite_health_check"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["compositeHealthCheck"] = "composite_health_check_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "compositeHealthCheck" in 
jsonified_request + assert jsonified_request["compositeHealthCheck"] == "composite_health_check_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.CompositeHealthCheckHealth() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.CompositeHealthCheckHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_health(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_health_rest_unset_required_fields(): + transport = transports.RegionCompositeHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_health._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "compositeHealthCheck", + "project", + "region", + ) + ) + ) + + +def test_get_health_rest_flattened(): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.CompositeHealthCheckHealth() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "composite_health_check": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + composite_health_check="composite_health_check_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.CompositeHealthCheckHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_health(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/compositeHealthChecks/{composite_health_check}/getHealth" + % client.transport._host, + args[1], + ) + + +def test_get_health_rest_flattened_error(transport: str = "rest"): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_health( + compute.GetHealthRegionCompositeHealthCheckRequest(), + project="project_value", + region="region_value", + composite_health_check="composite_health_check_value", + ) + + def test_insert_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -4033,6 +4236,149 @@ def test_get_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_get_health_rest_bad_request( + request_type=compute.GetHealthRegionCompositeHealthCheckRequest, +): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "composite_health_check": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_health(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetHealthRegionCompositeHealthCheckRequest, + dict, + ], +) +def test_get_health_rest_call_success(request_type): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "composite_health_check": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.CompositeHealthCheckHealth( + health_state="health_state_value", + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.CompositeHealthCheckHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_health(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.CompositeHealthCheckHealth) + assert response.health_state == "health_state_value" + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_health_rest_interceptors(null_interceptor): + transport = transports.RegionCompositeHealthChecksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionCompositeHealthChecksRestInterceptor(), + ) + client = RegionCompositeHealthChecksClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionCompositeHealthChecksRestInterceptor, "post_get_health" + ) as post, + mock.patch.object( + transports.RegionCompositeHealthChecksRestInterceptor, + "post_get_health_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionCompositeHealthChecksRestInterceptor, "pre_get_health" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetHealthRegionCompositeHealthCheckRequest.pb( + compute.GetHealthRegionCompositeHealthCheckRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.CompositeHealthCheckHealth.to_json( + compute.CompositeHealthCheckHealth() + ) + req.return_value.content = return_value + + request = compute.GetHealthRegionCompositeHealthCheckRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.CompositeHealthCheckHealth() + post_with_metadata.return_value = 
compute.CompositeHealthCheckHealth(), metadata + + client.get_health( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_insert_rest_bad_request( request_type=compute.InsertRegionCompositeHealthCheckRequest, ): @@ -4984,6 +5330,26 @@ def test_get_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_health_empty_call_rest(): + client = RegionCompositeHealthChecksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_health), "__call__") as call: + client.get_health(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetHealthRegionCompositeHealthCheckRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_insert_unary_empty_call_rest(): @@ -5091,6 +5457,7 @@ def test_region_composite_health_checks_base_transport(): "aggregated_list", "delete", "get", + "get_health", "insert", "list", "patch", @@ -5249,6 +5616,9 @@ def test_region_composite_health_checks_client_transport_session_collision( session1 = client1.transport.get._session session2 = client2.transport.get._session assert session1 != session2 + session1 = client1.transport.get_health._session + session2 = client2.transport.get_health._session + assert session1 != session2 session1 = client1.transport.insert._session session2 = client2.transport.insert._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py index abd8194c8d28..f4c4ae53acb1 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_disks.py @@ -1723,7 +1723,9 @@ def test_bulk_insert_rest_flattened(): project="project_value", region="region_value", bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource( - source_consistency_group_policy="source_consistency_group_policy_value" + instant_snapshot_group_parameters=compute.InstantSnapshotGroupParameters( + source_instant_snapshot_group="source_instant_snapshot_group_value" + ) ), ) mock_args.update(sample_request) @@ -1765,7 +1767,9 @@ def test_bulk_insert_rest_flattened_error(transport: str = "rest"): project="project_value", region="region_value", bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource( - source_consistency_group_policy="source_consistency_group_policy_value" + instant_snapshot_group_parameters=compute.InstantSnapshotGroupParameters( + source_instant_snapshot_group="source_instant_snapshot_group_value" + ) ), ) @@ -1929,7 +1933,9 @@ def test_bulk_insert_unary_rest_flattened(): 
project="project_value", region="region_value", bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource( - source_consistency_group_policy="source_consistency_group_policy_value" + instant_snapshot_group_parameters=compute.InstantSnapshotGroupParameters( + source_instant_snapshot_group="source_instant_snapshot_group_value" + ) ), ) mock_args.update(sample_request) @@ -1971,7 +1977,9 @@ def test_bulk_insert_unary_rest_flattened_error(transport: str = "rest"): project="project_value", region="region_value", bulk_insert_disk_resource_resource=compute.BulkInsertDiskResource( - source_consistency_group_policy="source_consistency_group_policy_value" + instant_snapshot_group_parameters=compute.InstantSnapshotGroupParameters( + source_instant_snapshot_group="source_instant_snapshot_group_value" + ) ), ) @@ -7335,187 +7343,613 @@ def test_update_unary_rest_flattened_error(transport: str = "rest"): ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.RegionDisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_update_kms_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = RegionDisksClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport="rest", ) - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.RegionDisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RegionDisksClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a transport instance. - transport = transports.RegionDisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RegionDisksClient( - client_options=options, - transport=transport, - ) + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = RegionDisksClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc - # It is an error to provide scopes and a transport instance. - transport = transports.RegionDisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = RegionDisksClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) + request = {} + client.update_kms_key(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.RegionDisksRestTransport( - credentials=ga_credentials.AnonymousCredentials(), + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_rest_required_fields( + request_type=compute.UpdateKmsKeyRegionDiskRequest, +): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - client = RegionDisksClient(transport=transport) - assert client.transport is transport + # verify fields with default values are dropped -@pytest.mark.parametrize( - "transport_class", - [ - transports.RegionDisksRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_transport_kind_rest(): - transport = RegionDisksClient.get_transport_class("rest")( + jsonified_request["disk"] = "disk_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == "disk_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" -def test_add_resource_policies_rest_bad_request( - request_type=compute.AddResourcePoliciesRegionDiskRequest, -): client = RegionDisksClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} request = request_type(**request_init) - # Mock the http request call within the method and fake a BadRequest error. 
- with ( - mock.patch.object(Session, "request") as req, - pytest.raises(core_exceptions.BadRequest), - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.add_resource_policies(request) + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -@pytest.mark.parametrize( - "request_type", - [ - compute.AddResourcePoliciesRegionDiskRequest, - dict, - ], -) -def test_add_resource_policies_rest_call_success(request_type): - client = RegionDisksClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # send a request that will satisfy transcoding - request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} - request_init["region_disks_add_resource_policies_request_resource"] = { - "resource_policies": ["resource_policies_value1", "resource_policies_value2"] - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Determine if the message type is proto-plus or protobuf - test_field = compute.AddResourcePoliciesRegionDiskRequest.meta.fields[ - "region_disks_add_resource_policies_request_resource" - ] + response = client.update_kms_key(request) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields +def test_update_kms_key_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "disk", + "project", + "region", + "regionDiskUpdateKmsKeyRequestResource", + ) + ) + ) - subfields_not_in_runtime = [] - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "region_disks_add_resource_policies_request_resource" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value +def test_update_kms_key_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( 
- { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + disk="disk_value", + region_disk_update_kms_key_request_resource=compute.RegionDiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key( + compute.UpdateKmsKeyRegionDiskRequest(), + project="project_value", + region="region_value", + disk="disk_value", + region_disk_update_kms_key_request_resource=compute.RegionDiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_update_kms_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_unary_rest_required_fields( + request_type=compute.UpdateKmsKeyRegionDiskRequest, +): + transport_class = transports.RegionDisksRestTransport + + request_init = {} + request_init["disk"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["disk"] = "disk_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "disk" in jsonified_request + assert jsonified_request["disk"] == "disk_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_unary_rest_unset_required_fields(): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "disk", + "project", + "region", + "regionDiskUpdateKmsKeyRequestResource", + ) + ) + ) + + +def test_update_kms_key_unary_rest_flattened(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2", "disk": "sample3"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + disk="disk_value", + region_disk_update_kms_key_request_resource=compute.RegionDiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/disks/{disk}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_unary_rest_flattened_error(transport: str = "rest"): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key_unary( + compute.UpdateKmsKeyRegionDiskRequest(), + project="project_value", + region="region_value", + disk="disk_value", + region_disk_update_kms_key_request_resource=compute.RegionDiskUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionDisksClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionDisksClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionDisksClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionDisksClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionDisksClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionDisksRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = RegionDisksClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_add_resource_policies_rest_bad_request( + request_type=compute.AddResourcePoliciesRegionDiskRequest, +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.add_resource_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.AddResourcePoliciesRegionDiskRequest, + dict, + ], +) +def test_add_resource_policies_rest_call_success(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init["region_disks_add_resource_policies_request_resource"] = { + "resource_policies": ["resource_policies_value1", "resource_policies_value2"] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.AddResourcePoliciesRegionDiskRequest.meta.fields[ + "region_disks_add_resource_policies_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disks_add_resource_policies_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, } ) @@ -7714,7 +8148,15 @@ def test_bulk_insert_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} request_init["bulk_insert_disk_resource_resource"] = { - "source_consistency_group_policy": "source_consistency_group_policy_value" + "instant_snapshot_group_parameters": { + "source_instant_snapshot_group": "source_instant_snapshot_group_value" + }, + "snapshot_group_parameters": { + "replica_zones": ["replica_zones_value1", "replica_zones_value2"], + 
"source_snapshot_group": "source_snapshot_group_value", + "type_": "type__value", + }, + "source_consistency_group_policy": "source_consistency_group_policy_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -7984,6 +8426,7 @@ def test_create_snapshot_rest_call_success(request_type): "location_hint": "location_hint_value", "name": "name_value", "params": {"resource_manager_tags": {}}, + "region": "region_value", "satisfies_pzi": True, "satisfies_pzs": True, "self_link": "self_link_value", @@ -7994,6 +8437,8 @@ def test_create_snapshot_rest_call_success(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_group_id": "snapshot_group_id_value", + "snapshot_group_name": "snapshot_group_name_value", "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, @@ -11345,6 +11790,263 @@ def test_update_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_update_kms_key_rest_bad_request( + request_type=compute.UpdateKmsKeyRegionDiskRequest, +): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_kms_key(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateKmsKeyRegionDiskRequest, + dict, + ], +) +def test_update_kms_key_rest_call_success(request_type): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} + request_init["region_disk_update_kms_key_request_resource"] = { + "kms_key_name": "kms_key_name_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateKmsKeyRegionDiskRequest.meta.fields[ + "region_disk_update_kms_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_disk_update_kms_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["region_disk_update_kms_key_request_resource"][ + 
field + ] + ), + ): + del request_init["region_disk_update_kms_key_request_resource"][ + field + ][i][subfield] + else: + del request_init["region_disk_update_kms_key_request_resource"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_kms_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_kms_key_rest_interceptors(null_interceptor): + transport = transports.RegionDisksRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionDisksRestInterceptor(), + ) + client = RegionDisksClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionDisksRestInterceptor, "post_update_kms_key" + ) as post, + mock.patch.object( + transports.RegionDisksRestInterceptor, "post_update_kms_key_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionDisksRestInterceptor, "pre_update_kms_key" + ) as 
pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateKmsKeyRegionDiskRequest.pb( + compute.UpdateKmsKeyRegionDiskRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateKmsKeyRegionDiskRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update_kms_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = RegionDisksClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -11704,6 +12406,26 @@ def test_update_unary_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_kms_key_unary_empty_call_rest(): + client = RegionDisksClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_kms_key), "__call__") as call: + client.update_kms_key_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateKmsKeyRegionDiskRequest() + + assert args[0] == request_msg + + def test_region_disks_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): @@ -11743,6 +12465,7 @@ def test_region_disks_base_transport(): "stop_group_async_replication", "test_iam_permissions", "update", + "update_kms_key", ) for method in methods: with pytest.raises(NotImplementedError): @@ -11937,6 +12660,9 @@ def test_region_disks_client_transport_session_collision(transport_name): session1 = client1.transport.update._session session2 = client2.transport.update._session assert session1 != session2 + session1 = client1.transport.update_kms_key._session + session2 = client2.transport.update_kms_key._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_sources.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_sources.py index c7adcc74f068..2fb3cf25f5a3 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_sources.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_health_sources.py @@ -2089,6 +2089,209 @@ def test_get_rest_flattened_error(transport: str = "rest"): ) +def test_get_health_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert 
client._transport.get_health in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_health] = mock_rpc + + request = {} + client.get_health(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_health(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_health_rest_required_fields( + request_type=compute.GetHealthRegionHealthSourceRequest, +): + transport_class = transports.RegionHealthSourcesRestTransport + + request_init = {} + request_init["health_source"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["healthSource"] = "health_source_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_health._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "healthSource" in jsonified_request + assert jsonified_request["healthSource"] == "health_source_value" + assert "project" in 
jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.HealthSourceHealth() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.HealthSourceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_health(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_health_rest_unset_required_fields(): + transport = transports.RegionHealthSourcesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_health._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "healthSource", + 
"project", + "region", + ) + ) + ) + + +def test_get_health_rest_flattened(): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthSourceHealth() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "health_source": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + health_source="health_source_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.HealthSourceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_health(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/healthSources/{health_source}/getHealth" + % client.transport._host, + args[1], + ) + + +def test_get_health_rest_flattened_error(transport: str = "rest"): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_health( + compute.GetHealthRegionHealthSourceRequest(), + project="project_value", + region="region_value", + health_source="health_source_value", + ) + + def test_insert_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -3978,6 +4181,147 @@ def test_get_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_get_health_rest_bad_request( + request_type=compute.GetHealthRegionHealthSourceRequest, +): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_source": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_health(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetHealthRegionHealthSourceRequest, + dict, + ], +) +def test_get_health_rest_call_success(request_type): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "health_source": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.HealthSourceHealth( + health_state="health_state_value", + kind="kind_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.HealthSourceHealth.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_health(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.HealthSourceHealth) + assert response.health_state == "health_state_value" + assert response.kind == "kind_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_health_rest_interceptors(null_interceptor): + transport = transports.RegionHealthSourcesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionHealthSourcesRestInterceptor(), + ) + client = RegionHealthSourcesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionHealthSourcesRestInterceptor, "post_get_health" + ) as post, + mock.patch.object( + transports.RegionHealthSourcesRestInterceptor, + "post_get_health_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionHealthSourcesRestInterceptor, "pre_get_health" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetHealthRegionHealthSourceRequest.pb( + 
compute.GetHealthRegionHealthSourceRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.HealthSourceHealth.to_json(compute.HealthSourceHealth()) + req.return_value.content = return_value + + request = compute.GetHealthRegionHealthSourceRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.HealthSourceHealth() + post_with_metadata.return_value = compute.HealthSourceHealth(), metadata + + client.get_health( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_insert_rest_bad_request(request_type=compute.InsertRegionHealthSourceRequest): client = RegionHealthSourcesClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -4908,6 +5252,26 @@ def test_get_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_health_empty_call_rest(): + client = RegionHealthSourcesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_health), "__call__") as call: + client.get_health(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetHealthRegionHealthSourceRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. 
request == None and no flattened fields passed, work. def test_insert_unary_empty_call_rest(): @@ -5015,6 +5379,7 @@ def test_region_health_sources_base_transport(): "aggregated_list", "delete", "get", + "get_health", "insert", "list", "patch", @@ -5171,6 +5536,9 @@ def test_region_health_sources_client_transport_session_collision(transport_name session1 = client1.transport.get._session session2 = client2.transport.get._session assert session1 != session2 + session1 = client1.transport.get_health._session + session2 = client2.transport.get_health._session + assert session1 != session2 session1 = client1.transport.insert._session session2 = client2.transport.insert._session assert session1 != session2 diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_manager_resize_requests.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_manager_resize_requests.py new file mode 100644 index 000000000000..d52ac94ed4fb --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_manager_resize_requests.py @@ -0,0 +1,4690 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.api_core.extended_operation as extended_operation # type: ignore +import google.auth +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account + +from google.cloud.compute_v1.services.region_instance_group_manager_resize_requests import ( + RegionInstanceGroupManagerResizeRequestsClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert 
bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint(None) + is None + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + api_endpoint + ) + == api_mtls_endpoint + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + api_mtls_endpoint + ) + == api_mtls_endpoint + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + sandbox_endpoint + ) + == 
sandbox_mtls_endpoint + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + non_googleapi + ) + == non_googleapi + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_default_mtls_endpoint( + custom_endpoint + ) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (True, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + == ( + False, + "auto", + None, + ) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "never", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "always", None) + ) + + 
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "auto", None) + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._read_environment_variables() + == (False, "auto", "foo.com") + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is True + ) + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is False + ) + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is True + ) + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is False + ) + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is True + ) + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is False + ) + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is True + ) + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is False + ) + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is False + ) + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is False + ) + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._use_client_cert_effective() + is False + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_client_cert_source( + None, False + ) + is None + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_client_cert_source( + None, True + ) + is mock_default_cert_source + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RegionInstanceGroupManagerResizeRequestsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionInstanceGroupManagerResizeRequestsClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE + default_endpoint = RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = 
RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RegionInstanceGroupManagerResizeRequestsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == RegionInstanceGroupManagerResizeRequestsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RegionInstanceGroupManagerResizeRequestsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionInstanceGroupManagerResizeRequestsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + RegionInstanceGroupManagerResizeRequestsClient._get_universe_domain(None, None) + == RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RegionInstanceGroupManagerResizeRequestsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionInstanceGroupManagerResizeRequestsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionInstanceGroupManagerResizeRequestsClient(credentials=cred) + client._transport._credentials = cred + + error = 
core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionInstanceGroupManagerResizeRequestsClient, "rest"), + ], +) +def test_region_instance_group_manager_resize_requests_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RegionInstanceGroupManagerResizeRequestsRestTransport, "rest"), + ], +) +def test_region_instance_group_manager_resize_requests_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionInstanceGroupManagerResizeRequestsClient, "rest"), + ], +) +def 
test_region_instance_group_manager_resize_requests_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_region_instance_group_manager_resize_requests_client_get_transport_class(): + transport = RegionInstanceGroupManagerResizeRequestsClient.get_transport_class() + available_transports = [ + transports.RegionInstanceGroupManagerResizeRequestsRestTransport, + ] + assert transport in available_transports + + transport = RegionInstanceGroupManagerResizeRequestsClient.get_transport_class( + "rest" + ) + assert transport == transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RegionInstanceGroupManagerResizeRequestsClient, + transports.RegionInstanceGroupManagerResizeRequestsRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + RegionInstanceGroupManagerResizeRequestsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionInstanceGroupManagerResizeRequestsClient), +) +def test_region_instance_group_manager_resize_requests_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. 
+ with mock.patch.object( + RegionInstanceGroupManagerResizeRequestsClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object( + RegionInstanceGroupManagerResizeRequestsClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, 
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + RegionInstanceGroupManagerResizeRequestsClient, + transports.RegionInstanceGroupManagerResizeRequestsRestTransport, + "rest", + "true", + ), + ( + RegionInstanceGroupManagerResizeRequestsClient, + transports.RegionInstanceGroupManagerResizeRequestsRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + RegionInstanceGroupManagerResizeRequestsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionInstanceGroupManagerResizeRequestsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_instance_group_manager_resize_requests_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [RegionInstanceGroupManagerResizeRequestsClient] +) +@mock.patch.object( + RegionInstanceGroupManagerResizeRequestsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RegionInstanceGroupManagerResizeRequestsClient), +) +def test_region_instance_group_manager_resize_requests_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize( + "client_class", [RegionInstanceGroupManagerResizeRequestsClient] +) +@mock.patch.object( + RegionInstanceGroupManagerResizeRequestsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionInstanceGroupManagerResizeRequestsClient), +) +def test_region_instance_group_manager_resize_requests_client_client_api_endpoint( + client_class, +): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_UNIVERSE + default_endpoint = RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionInstanceGroupManagerResizeRequestsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RegionInstanceGroupManagerResizeRequestsClient, + transports.RegionInstanceGroupManagerResizeRequestsRestTransport, + "rest", + ), + ], +) +def test_region_instance_group_manager_resize_requests_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RegionInstanceGroupManagerResizeRequestsClient, + transports.RegionInstanceGroupManagerResizeRequestsRestTransport, + "rest", + None, + ), + ], +) +def test_region_instance_group_manager_resize_requests_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_cancel_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.cancel] = mock_rpc + + request = {} + client.cancel(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_cancel_rest_required_fields( + request_type=compute.CancelRegionInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request_init["resize_request"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resizeRequest"] = "resize_request_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "project", + "region", + "resizeRequest", + ) + ) + ) + + +def test_cancel_rest_flattened(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.cancel(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}/cancel" + % client.transport._host, + args[1], + ) + + +def test_cancel_rest_flattened_error(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.cancel( + compute.CancelRegionInstanceGroupManagerResizeRequestRequest(), + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_cancel_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.cancel in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.cancel] = mock_rpc + + request = {} + client.cancel_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.cancel_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_cancel_unary_rest_required_fields( + request_type=compute.CancelRegionInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request_init["resize_request"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resizeRequest"] = "resize_request_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).cancel._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_cancel_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.cancel._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "project", + "region", + "resizeRequest", + ) + ) + ) + + +def test_cancel_unary_rest_flattened(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.cancel_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}/cancel" + % client.transport._host, + args[1], + ) + + +def test_cancel_unary_rest_flattened_error(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.cancel_unary( + compute.CancelRegionInstanceGroupManagerResizeRequestRequest(), + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rest_required_fields( + request_type=compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request_init["resize_request"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resizeRequest"] = "resize_request_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "project", + "region", + "resizeRequest", + ) + ) + ) + + +def test_delete_rest_flattened(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionInstanceGroupManagerResizeRequestRequest(), + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request_init["resize_request"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resizeRequest"] = "resize_request_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "project", + "region", + "resizeRequest", + ) + ) + ) + + +def test_delete_unary_rest_flattened(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionInstanceGroupManagerResizeRequestRequest(), + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields( + request_type=compute.GetRegionInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request_init["resize_request"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resizeRequest"] = "resize_request_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resizeRequest" in jsonified_request + assert jsonified_request["resizeRequest"] == "resize_request_value" + + client = 
RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerResizeRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequest.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instanceGroupManager", + "project", + "region", + "resizeRequest", + ) + ) + ) + + +def test_get_rest_flattened(): + client = 
RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstanceGroupManagerResizeRequest() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequest.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests/{resize_request}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get( + compute.GetRegionInstanceGroupManagerResizeRequestRequest(), + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + resize_request="resize_request_value", + ) + + +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_rest_required_fields( + request_type=compute.InsertRegionInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "instanceGroupManagerResizeRequestResource", + "project", + "region", + ) + ) + ) + + +def test_insert_rest_flattened(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + instance_group_manager_resize_request_resource=compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionInstanceGroupManagerResizeRequestRequest(), + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + instance_group_manager_resize_request_resource=compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertRegionInstanceGroupManagerResizeRequestRequest, +): + transport_class = transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instanceGroupManager", + "instanceGroupManagerResizeRequestResource", + "project", + "region", + ) + ) + ) + + +def test_insert_unary_rest_flattened(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + instance_group_manager_resize_request_resource=compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionInstanceGroupManagerResizeRequestRequest(), + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + instance_group_manager_resize_request_resource=compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields( + request_type=compute.ListRegionInstanceGroupManagerResizeRequestsRequest, +): + transport_class = transports.RegionInstanceGroupManagerResizeRequestsRestTransport + + request_init = {} + request_init["instance_group_manager"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instanceGroupManager"] = "instance_group_manager_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instanceGroupManager" in jsonified_request + assert jsonified_request["instanceGroupManager"] == "instance_group_manager_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.RegionInstanceGroupManagerResizeRequestsListResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + compute.RegionInstanceGroupManagerResizeRequestsListResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "instanceGroupManager", + "project", + "region", + ) + ) + ) + + +def test_list_rest_flattened(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.RegionInstanceGroupManagerResizeRequestsListResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagerResizeRequestsListResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instanceGroupManagers/{instance_group_manager}/resizeRequests" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list( + compute.ListRegionInstanceGroupManagerResizeRequestsRequest(), + project="project_value", + region="region_value", + instance_group_manager="instance_group_manager_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.RegionInstanceGroupManagerResizeRequestsListResponse( + items=[ + compute.InstanceGroupManagerResizeRequest(), + compute.InstanceGroupManagerResizeRequest(), + compute.InstanceGroupManagerResizeRequest(), + ], + next_page_token="abc", + ), + compute.RegionInstanceGroupManagerResizeRequestsListResponse( + items=[], + next_page_token="def", + ), + compute.RegionInstanceGroupManagerResizeRequestsListResponse( + items=[ + compute.InstanceGroupManagerResizeRequest(), + ], + next_page_token="ghi", + ), + compute.RegionInstanceGroupManagerResizeRequestsListResponse( + items=[ + compute.InstanceGroupManagerResizeRequest(), + compute.InstanceGroupManagerResizeRequest(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + compute.RegionInstanceGroupManagerResizeRequestsListResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "project": "sample1", + "region": "sample2", + 
"instance_group_manager": "sample3", + } + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, compute.InstanceGroupManagerResizeRequest) for i in results + ) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupManagerResizeRequestsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstanceGroupManagerResizeRequestsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. 
+ options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstanceGroupManagerResizeRequestsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstanceGroupManagerResizeRequestsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionInstanceGroupManagerResizeRequestsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionInstanceGroupManagerResizeRequestsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. 
+ with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = RegionInstanceGroupManagerResizeRequestsClient.get_transport_class( + "rest" + )(credentials=ga_credentials.AnonymousCredentials()) + assert transport.kind == "rest" + + +def test_cancel_rest_bad_request( + request_type=compute.CancelRegionInstanceGroupManagerResizeRequestRequest, +): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.CancelRegionInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_cancel_rest_call_success(request_type): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request 
call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.cancel(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_cancel_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = RegionInstanceGroupManagerResizeRequestsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "post_cancel", + ) as post, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + 
"post_cancel_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "pre_cancel", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.CancelRegionInstanceGroupManagerResizeRequestRequest.pb( + compute.CancelRegionInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.CancelRegionInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.cancel( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_rest_bad_request( + request_type=compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, +): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteRegionInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_delete_rest_call_success(request_type): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = RegionInstanceGroupManagerResizeRequestsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "post_delete", + ) as post, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + 
"post_delete_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "pre_delete", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.DeleteRegionInstanceGroupManagerResizeRequestRequest.pb( + compute.DeleteRegionInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.DeleteRegionInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_rest_bad_request( + request_type=compute.GetRegionInstanceGroupManagerResizeRequestRequest, +): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetRegionInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_get_rest_call_success(request_type): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + "resize_request": "sample4", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstanceGroupManagerResizeRequest( + creation_timestamp="creation_timestamp_value", + description="description_value", + id=205, + kind="kind_value", + name="name_value", + region="region_value", + resize_by=972, + self_link="self_link_value", + self_link_with_id="self_link_with_id_value", + state="state_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstanceGroupManagerResizeRequest.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.InstanceGroupManagerResizeRequest) + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.region == "region_value" + assert response.resize_by == 972 + assert response.self_link == "self_link_value" + assert response.self_link_with_id == "self_link_with_id_value" + assert response.state == "state_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = RegionInstanceGroupManagerResizeRequestsClient(transport=transport) + + with ( + 
mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "post_get", + ) as post, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "post_get_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "pre_get", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetRegionInstanceGroupManagerResizeRequestRequest.pb( + compute.GetRegionInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.InstanceGroupManagerResizeRequest.to_json( + compute.InstanceGroupManagerResizeRequest() + ) + req.return_value.content = return_value + + request = compute.GetRegionInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstanceGroupManagerResizeRequest() + post_with_metadata.return_value = ( + compute.InstanceGroupManagerResizeRequest(), + metadata, + ) + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_insert_rest_bad_request( + request_type=compute.InsertRegionInstanceGroupManagerResizeRequestRequest, +): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that 
will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.insert(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertRegionInstanceGroupManagerResizeRequestRequest, + dict, + ], +) +def test_insert_rest_call_success(request_type): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request_init["instance_group_manager_resize_request_resource"] = { + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "name": "name_value", + "region": "region_value", + "requested_run_duration": {"nanos": 543, "seconds": 751}, + "resize_by": 972, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "status": { + "error": { + "errors": [ + { + "code": "code_value", + "error_details": [ + { + "error_info": { + "domain": "domain_value", + "metadatas": {}, + "reason": "reason_value", + }, + "help_": { + "links": [ + { + "description": "description_value", + "url": "url_value", + } + ] + }, + "localized_message": { + "locale": "locale_value", + 
"message": "message_value", + }, + "quota_info": { + "dimensions": {}, + "future_limit": 0.1305, + "limit": 0.543, + "limit_name": "limit_name_value", + "metric_name": "metric_name_value", + "rollout_status": "rollout_status_value", + }, + } + ], + "location": "location_value", + "message": "message_value", + } + ] + }, + "last_attempt": {"error": {}}, + }, + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.InsertRegionInstanceGroupManagerResizeRequestRequest.meta.fields[ + "instance_group_manager_resize_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instance_group_manager_resize_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + 
request_init["instance_group_manager_resize_request_resource"][ + field + ] + ), + ): + del request_init["instance_group_manager_resize_request_resource"][ + field + ][i][subfield] + else: + del request_init["instance_group_manager_resize_request_resource"][ + field + ][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = RegionInstanceGroupManagerResizeRequestsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "post_insert", + ) as post, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + 
"post_insert_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "pre_insert", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.InsertRegionInstanceGroupManagerResizeRequestRequest.pb( + compute.InsertRegionInstanceGroupManagerResizeRequestRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.InsertRegionInstanceGroupManagerResizeRequestRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_rest_bad_request( + request_type=compute.ListRegionInstanceGroupManagerResizeRequestsRequest, +): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListRegionInstanceGroupManagerResizeRequestsRequest, + dict, + ], +) +def test_list_rest_call_success(request_type): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instance_group_manager": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.RegionInstanceGroupManagerResizeRequestsListResponse( + etag="etag_value", + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + unreachables=["unreachables_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.RegionInstanceGroupManagerResizeRequestsListResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListPager) + assert response.etag == "etag_value" + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + assert response.unreachables == ["unreachables_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor(), + ) + client = RegionInstanceGroupManagerResizeRequestsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "post_list", + ) as post, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + 
"post_list_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsRestInterceptor, + "pre_list", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.ListRegionInstanceGroupManagerResizeRequestsRequest.pb( + compute.ListRegionInstanceGroupManagerResizeRequestsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + compute.RegionInstanceGroupManagerResizeRequestsListResponse.to_json( + compute.RegionInstanceGroupManagerResizeRequestsListResponse() + ) + ) + req.return_value.content = return_value + + request = compute.ListRegionInstanceGroupManagerResizeRequestsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = ( + compute.RegionInstanceGroupManagerResizeRequestsListResponse() + ) + post_with_metadata.return_value = ( + compute.RegionInstanceGroupManagerResizeRequestsListResponse(), + metadata, + ) + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_client_w_rest(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_cancel_unary_empty_call_rest(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.cancel), "__call__") as call: + client.cancel_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.CancelRegionInstanceGroupManagerResizeRequestRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_unary_empty_call_rest(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete), "__call__") as call: + client.delete_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.DeleteRegionInstanceGroupManagerResizeRequestRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_empty_call_rest(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get), "__call__") as call: + client.get(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetRegionInstanceGroupManagerResizeRequestRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_insert_unary_empty_call_rest(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.insert), "__call__") as call: + client.insert_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.InsertRegionInstanceGroupManagerResizeRequestRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_empty_call_rest(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list), "__call__") as call: + client.list(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.ListRegionInstanceGroupManagerResizeRequestsRequest() + + assert args[0] == request_msg + + +def test_region_instance_group_manager_resize_requests_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionInstanceGroupManagerResizeRequestsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_region_instance_group_manager_resize_requests_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.region_instance_group_manager_resize_requests.transports.RegionInstanceGroupManagerResizeRequestsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RegionInstanceGroupManagerResizeRequestsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "cancel", + "delete", + "get", + "insert", + "list", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_instance_group_manager_resize_requests_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.compute_v1.services.region_instance_group_manager_resize_requests.transports.RegionInstanceGroupManagerResizeRequestsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstanceGroupManagerResizeRequestsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_region_instance_group_manager_resize_requests_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.compute_v1.services.region_instance_group_manager_resize_requests.transports.RegionInstanceGroupManagerResizeRequestsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstanceGroupManagerResizeRequestsTransport() + adc.assert_called_once() + + +def test_region_instance_group_manager_resize_requests_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionInstanceGroupManagerResizeRequestsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_region_instance_group_manager_resize_requests_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RegionInstanceGroupManagerResizeRequestsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_instance_group_manager_resize_requests_host_no_port(transport_name): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if 
transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_instance_group_manager_resize_requests_host_with_port(transport_name): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_instance_group_manager_resize_requests_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionInstanceGroupManagerResizeRequestsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionInstanceGroupManagerResizeRequestsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.cancel._session + session2 = client2.transport.cancel._session + assert session1 != session2 + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = 
RegionInstanceGroupManagerResizeRequestsClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionInstanceGroupManagerResizeRequestsClient.common_billing_account_path( + **expected + ) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupManagerResizeRequestsClient.parse_common_billing_account_path( + path + ) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RegionInstanceGroupManagerResizeRequestsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionInstanceGroupManagerResizeRequestsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupManagerResizeRequestsClient.parse_common_folder_path( + path + ) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RegionInstanceGroupManagerResizeRequestsClient.common_organization_path( + organization + ) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionInstanceGroupManagerResizeRequestsClient.common_organization_path( + **expected + ) + + # Check that the path construction is reversible. 
+ actual = ( + RegionInstanceGroupManagerResizeRequestsClient.parse_common_organization_path( + path + ) + ) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = RegionInstanceGroupManagerResizeRequestsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionInstanceGroupManagerResizeRequestsClient.common_project_path( + **expected + ) + + # Check that the path construction is reversible. + actual = RegionInstanceGroupManagerResizeRequestsClient.parse_common_project_path( + path + ) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RegionInstanceGroupManagerResizeRequestsClient.common_location_path( + project, location + ) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionInstanceGroupManagerResizeRequestsClient.common_location_path( + **expected + ) + + # Check that the path construction is reversible. 
+ actual = RegionInstanceGroupManagerResizeRequestsClient.parse_common_location_path( + path + ) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsTransport, + "_prep_wrapped_messages", + ) as prep: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RegionInstanceGroupManagerResizeRequestsTransport, + "_prep_wrapped_messages", + ) as prep: + transport_class = ( + RegionInstanceGroupManagerResizeRequestsClient.get_transport_class() + ) + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionInstanceGroupManagerResizeRequestsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + RegionInstanceGroupManagerResizeRequestsClient, + transports.RegionInstanceGroupManagerResizeRequestsRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py index b068c6e5eb80..2007f9625af1 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_group_managers.py @@ -12488,6 +12488,21 @@ def test_insert_rest_call_success(request_type): "in_progress": True, "last_progress_check": {"error": {}, "timestamp": "timestamp_value"}, }, + "current_instance_statuses": { + "deprovisioning": 1520, + "non_existent": 1310, + "pending": 741, + "pending_stop": 1290, + "provisioning": 1319, + "repairing": 961, + "running": 769, + 
"staging": 749, + "stopped": 767, + "stopping": 884, + "suspended": 971, + "suspending": 1088, + "terminated": 1069, + }, "is_stable": True, "stateful": { "has_stateful_config": True, @@ -13469,6 +13484,21 @@ def test_patch_rest_call_success(request_type): "in_progress": True, "last_progress_check": {"error": {}, "timestamp": "timestamp_value"}, }, + "current_instance_statuses": { + "deprovisioning": 1520, + "non_existent": 1310, + "pending": 741, + "pending_stop": 1290, + "provisioning": 1319, + "repairing": 961, + "running": 769, + "staging": 749, + "stopped": 767, + "stopping": 884, + "suspended": 971, + "suspending": 1088, + "terminated": 1069, + }, "is_stable": True, "stateful": { "has_stateful_config": True, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py index 9cb1311ea0b8..263cd12e87a2 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instance_templates.py @@ -3089,6 +3089,7 @@ def test_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": "parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py index 6bddefba6972..3758b0dd9cee 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instances.py @@ -1828,6 +1828,7 @@ def test_bulk_insert_rest_call_success(request_type): "nic_type": "nic_type_value", "parent_nic_name": 
"parent_nic_name_value", "queue_count": 1197, + "service_class_id": "service_class_id_value", "stack_type": "stack_type_value", "subnetwork": "subnetwork_value", "vlan": 433, diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshot_groups.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshot_groups.py new file mode 100644 index 000000000000..f1381e39c6cd --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshot_groups.py @@ -0,0 +1,5161 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.api_core.extended_operation as extended_operation # type: ignore +import google.auth +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account + +from google.cloud.compute_v1.services.region_instant_snapshot_groups import ( + RegionInstantSnapshotGroupsClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# 
TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert RegionInstantSnapshotGroupsClient._get_default_mtls_endpoint(None) is None + assert ( + RegionInstantSnapshotGroupsClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionInstantSnapshotGroupsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionInstantSnapshotGroupsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RegionInstantSnapshotGroupsClient._get_default_mtls_endpoint( + 
sandbox_mtls_endpoint + ) + == sandbox_mtls_endpoint + ) + assert ( + RegionInstantSnapshotGroupsClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + RegionInstantSnapshotGroupsClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert RegionInstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RegionInstantSnapshotGroupsClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert RegionInstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + RegionInstantSnapshotGroupsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert RegionInstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert RegionInstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert RegionInstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert RegionInstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, 
{"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionInstantSnapshotGroupsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert RegionInstantSnapshotGroupsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is True + ) + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is False + ) + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is True + ) + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is False + ) + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is True + ) + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is False + ) + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is True + ) + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is False + ) + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is False + ) + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + RegionInstantSnapshotGroupsClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() is False + ) + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert ( + RegionInstantSnapshotGroupsClient._use_client_cert_effective() + is False + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ( + RegionInstantSnapshotGroupsClient._get_client_cert_source(None, False) is None + ) + assert ( + RegionInstantSnapshotGroupsClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + RegionInstantSnapshotGroupsClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + RegionInstantSnapshotGroupsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + RegionInstantSnapshotGroupsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RegionInstantSnapshotGroupsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionInstantSnapshotGroupsClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + 
mock_client_cert_source = mock.Mock() + default_universe = RegionInstantSnapshotGroupsClient._DEFAULT_UNIVERSE + default_endpoint = ( + RegionInstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = RegionInstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + RegionInstantSnapshotGroupsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + RegionInstantSnapshotGroupsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RegionInstantSnapshotGroupsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionInstantSnapshotGroupsClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + RegionInstantSnapshotGroupsClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == RegionInstantSnapshotGroupsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionInstantSnapshotGroupsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RegionInstantSnapshotGroupsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionInstantSnapshotGroupsClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + RegionInstantSnapshotGroupsClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionInstantSnapshotGroupsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RegionInstantSnapshotGroupsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RegionInstantSnapshotGroupsClient._get_universe_domain( + None, universe_domain_env + ) + == universe_domain_env + ) + assert ( + RegionInstantSnapshotGroupsClient._get_universe_domain(None, None) + == RegionInstantSnapshotGroupsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RegionInstantSnapshotGroupsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionInstantSnapshotGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionInstantSnapshotGroupsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + 
client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionInstantSnapshotGroupsClient, "rest"), + ], +) +def test_region_instant_snapshot_groups_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RegionInstantSnapshotGroupsRestTransport, "rest"), + ], +) +def test_region_instant_snapshot_groups_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionInstantSnapshotGroupsClient, "rest"), + ], +) +def test_region_instant_snapshot_groups_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + 
service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_region_instant_snapshot_groups_client_get_transport_class(): + transport = RegionInstantSnapshotGroupsClient.get_transport_class() + available_transports = [ + transports.RegionInstantSnapshotGroupsRestTransport, + ] + assert transport in available_transports + + transport = RegionInstantSnapshotGroupsClient.get_transport_class("rest") + assert transport == transports.RegionInstantSnapshotGroupsRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RegionInstantSnapshotGroupsClient, + transports.RegionInstantSnapshotGroupsRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + RegionInstantSnapshotGroupsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionInstantSnapshotGroupsClient), +) +def test_region_instant_snapshot_groups_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object( + RegionInstantSnapshotGroupsClient, "get_transport_class" + ) as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. 
+ with mock.patch.object( + RegionInstantSnapshotGroupsClient, "get_transport_class" + ) as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, 
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + RegionInstantSnapshotGroupsClient, + transports.RegionInstantSnapshotGroupsRestTransport, + "rest", + "true", + ), + ( + RegionInstantSnapshotGroupsClient, + transports.RegionInstantSnapshotGroupsRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + RegionInstantSnapshotGroupsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionInstantSnapshotGroupsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_instant_snapshot_groups_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [RegionInstantSnapshotGroupsClient]) +@mock.patch.object( + RegionInstantSnapshotGroupsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RegionInstantSnapshotGroupsClient), +) +def test_region_instant_snapshot_groups_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize("client_class", [RegionInstantSnapshotGroupsClient]) +@mock.patch.object( + RegionInstantSnapshotGroupsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionInstantSnapshotGroupsClient), +) +def test_region_instant_snapshot_groups_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RegionInstantSnapshotGroupsClient._DEFAULT_UNIVERSE + default_endpoint = ( + RegionInstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + ) + mock_universe = "bar.com" + mock_endpoint = RegionInstantSnapshotGroupsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RegionInstantSnapshotGroupsClient, + transports.RegionInstantSnapshotGroupsRestTransport, + "rest", + ), + ], +) +def test_region_instant_snapshot_groups_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RegionInstantSnapshotGroupsClient, + transports.RegionInstantSnapshotGroupsRestTransport, + "rest", + None, + ), + ], +) +def test_region_instant_snapshot_groups_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rest_required_fields( + request_type=compute.DeleteRegionInstantSnapshotGroupRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["instant_snapshot_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instantSnapshotGroup"] = "instant_snapshot_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instantSnapshotGroup" in jsonified_request + assert jsonified_request["instantSnapshotGroup"] == "instant_snapshot_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instantSnapshotGroup", + "project", + "region", + ) + ) + ) + + +def test_delete_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instant_snapshot_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{instant_snapshot_group}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionInstantSnapshotGroupRequest(), + project="project_value", + region="region_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + + +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteRegionInstantSnapshotGroupRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["instant_snapshot_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instantSnapshotGroup"] = "instant_snapshot_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instantSnapshotGroup" in jsonified_request + assert jsonified_request["instantSnapshotGroup"] == "instant_snapshot_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "instantSnapshotGroup", + "project", + "region", + ) + ) + ) + + +def test_delete_unary_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instant_snapshot_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{instant_snapshot_group}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionInstantSnapshotGroupRequest(), + project="project_value", + region="region_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields( + request_type=compute.GetRegionInstantSnapshotGroupRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["instant_snapshot_group"] = "" + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["instantSnapshotGroup"] = "instant_snapshot_group_value" + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "instantSnapshotGroup" in jsonified_request + assert jsonified_request["instantSnapshotGroup"] == "instant_snapshot_group_value" + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstantSnapshotGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstantSnapshotGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "instantSnapshotGroup", + "project", + "region", + ) + ) + ) + + +def test_get_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.InstantSnapshotGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "instant_snapshot_group": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.InstantSnapshotGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{instant_snapshot_group}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionInstantSnapshotGroupRequest(), + project="project_value", + region="region_value", + instant_snapshot_group="instant_snapshot_group_value", + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=compute.GetIamPolicyRegionInstantSnapshotGroupRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("optionsRequestedPolicyVersion",)) + & set( + ( + "project", + "region", + "resource", + ) + ) + ) + + +def test_get_iam_policy_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyRegionInstantSnapshotGroupRequest(), + project="project_value", + region="region_value", + resource="resource_value", + ) + + +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_rest_required_fields( + request_type=compute.InsertRegionInstantSnapshotGroupRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "source_consistency_group", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "sourceConsistencyGroup", + ) + ) + & set( + ( + "instantSnapshotGroupResource", + "project", + "region", + ) + ) + ) + + +def test_insert_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instant_snapshot_group_resource=compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionInstantSnapshotGroupRequest(), + project="project_value", + region="region_value", + instant_snapshot_group_resource=compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertRegionInstantSnapshotGroupRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "source_consistency_group", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "sourceConsistencyGroup", + ) + ) + & set( + ( + "instantSnapshotGroupResource", + "project", + "region", + ) + ) + ) + + +def test_insert_unary_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + instant_snapshot_group_resource=compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionInstantSnapshotGroupRequest(), + project="project_value", + region="region_value", + instant_snapshot_group_resource=compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields( + request_type=compute.ListRegionInstantSnapshotGroupsRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.ListInstantSnapshotGroups() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.ListInstantSnapshotGroups.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "region", + ) + ) + ) + + +def test_list_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.ListInstantSnapshotGroups() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.ListInstantSnapshotGroups.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionInstantSnapshotGroupsRequest(), + project="project_value", + region="region_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.ListInstantSnapshotGroups( + items=[ + compute.InstantSnapshotGroup(), + compute.InstantSnapshotGroup(), + compute.InstantSnapshotGroup(), + ], + next_page_token="abc", + ), + compute.ListInstantSnapshotGroups( + items=[], + next_page_token="def", + ), + compute.ListInstantSnapshotGroups( + items=[ + compute.InstantSnapshotGroup(), + ], + next_page_token="ghi", + ), + compute.ListInstantSnapshotGroups( + items=[ + compute.InstantSnapshotGroup(), + compute.InstantSnapshotGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.ListInstantSnapshotGroups.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "region": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.InstantSnapshotGroup) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # 
Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=compute.SetIamPolicyRegionInstantSnapshotGroupRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with 
non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "regionSetPolicyRequestResource", + "resource", + ) + ) + ) + + +def test_set_iam_policy_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyRegionInstantSnapshotGroupRequest(), + project="project_value", + region="region_value", + resource="resource_value", + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.test_iam_permissions] = ( + mock_rpc + ) + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields( + request_type=compute.TestIamPermissionsRegionInstantSnapshotGroupRequest, +): + transport_class = transports.RegionInstantSnapshotGroupsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "resource", + "testPermissionsRequestResource", + ) + ) + ) + + +def test_test_iam_permissions_rest_flattened(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a 
response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/instantSnapshotGroups/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsRegionInstantSnapshotGroupRequest(), + project="project_value", + region="region_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstantSnapshotGroupsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstantSnapshotGroupsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionInstantSnapshotGroupsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionInstantSnapshotGroupsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionInstantSnapshotGroupsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionInstantSnapshotGroupsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = RegionInstantSnapshotGroupsClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_delete_rest_bad_request( + request_type=compute.DeleteRegionInstantSnapshotGroupRequest, +): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instant_snapshot_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteRegionInstantSnapshotGroupRequest, + dict, + ], +) +def test_delete_rest_call_success(request_type): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instant_snapshot_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstantSnapshotGroupsRestInterceptor(), + ) + client = RegionInstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "post_delete" + ) as post, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, + mock.patch.object( + 
transports.RegionInstantSnapshotGroupsRestInterceptor, "pre_delete" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.DeleteRegionInstantSnapshotGroupRequest.pb( + compute.DeleteRegionInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.DeleteRegionInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_rest_bad_request( + request_type=compute.GetRegionInstantSnapshotGroupRequest, +): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instant_snapshot_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetRegionInstantSnapshotGroupRequest, + dict, + ], +) +def test_get_rest_call_success(request_type): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "region": "sample2", + "instant_snapshot_group": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.InstantSnapshotGroup( + creation_timestamp="creation_timestamp_value", + description="description_value", + id=205, + kind="kind_value", + name="name_value", + region="region_value", + self_link="self_link_value", + self_link_with_id="self_link_with_id_value", + source_consistency_group="source_consistency_group_value", + status="status_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.InstantSnapshotGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.InstantSnapshotGroup) + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.self_link_with_id == "self_link_with_id_value" + assert response.source_consistency_group == "source_consistency_group_value" + assert response.status == "status_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstantSnapshotGroupsRestInterceptor(), + ) + client = RegionInstantSnapshotGroupsClient(transport=transport) + + with ( + 
mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "post_get" + ) as post, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "post_get_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "pre_get" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetRegionInstantSnapshotGroupRequest.pb( + compute.GetRegionInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.InstantSnapshotGroup.to_json( + compute.InstantSnapshotGroup() + ) + req.return_value.content = return_value + + request = compute.GetRegionInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.InstantSnapshotGroup() + post_with_metadata.return_value = compute.InstantSnapshotGroup(), metadata + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=compute.GetIamPolicyRegionInstantSnapshotGroupRequest, +): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # 
Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetIamPolicyRegionInstantSnapshotGroupRequest, + dict, + ], +) +def test_get_iam_policy_rest_call_success(request_type): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstantSnapshotGroupsRestInterceptor(), + ) + client = RegionInstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "post_get_iam_policy" + ) as post, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "pre_get_iam_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetIamPolicyRegionInstantSnapshotGroupRequest.pb( + compute.GetIamPolicyRegionInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Policy.to_json(compute.Policy()) + req.return_value.content = return_value + + request = compute.GetIamPolicyRegionInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata + + client.get_iam_policy( + 
request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_insert_rest_bad_request( + request_type=compute.InsertRegionInstantSnapshotGroupRequest, +): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.insert(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertRegionInstantSnapshotGroupRequest, + dict, + ], +) +def test_insert_rest_call_success(request_type): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["instant_snapshot_group_resource"] = { + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "id": 205, + "kind": "kind_value", + "name": "name_value", + "region": "region_value", + "resource_status": { + "consistency_membership_resolution_time": "consistency_membership_resolution_time_value", + "source_info": { + "consistency_group": "consistency_group_value", + "consistency_group_id": "consistency_group_id_value", + }, + }, + 
"self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "source_consistency_group": "source_consistency_group_value", + "status": "status_value", + "zone": "zone_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionInstantSnapshotGroupRequest.meta.fields[ + "instant_snapshot_group_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "instant_snapshot_group_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if 
isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["instant_snapshot_group_resource"][field]) + ): + del request_init["instant_snapshot_group_resource"][field][i][ + subfield + ] + else: + del request_init["instant_snapshot_group_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstantSnapshotGroupsRestInterceptor(), + ) + client = RegionInstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "post_insert" + ) as post, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, + mock.patch.object( + 
transports.RegionInstantSnapshotGroupsRestInterceptor, "pre_insert" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.InsertRegionInstantSnapshotGroupRequest.pb( + compute.InsertRegionInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.InsertRegionInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_rest_bad_request( + request_type=compute.ListRegionInstantSnapshotGroupsRequest, +): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListRegionInstantSnapshotGroupsRequest, + dict, + ], +) +def test_list_rest_call_success(request_type): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.ListInstantSnapshotGroups( + etag="etag_value", + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + unreachables=["unreachables_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.ListInstantSnapshotGroups.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.etag == "etag_value" + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + assert response.unreachables == ["unreachables_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstantSnapshotGroupsRestInterceptor(), + ) + client = RegionInstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "post_list" + ) as post, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "post_list_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "pre_list" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.ListRegionInstantSnapshotGroupsRequest.pb( + compute.ListRegionInstantSnapshotGroupsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.ListInstantSnapshotGroups.to_json( + compute.ListInstantSnapshotGroups() + ) + req.return_value.content = return_value + + request = compute.ListRegionInstantSnapshotGroupsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + 
pre.return_value = request, metadata + post.return_value = compute.ListInstantSnapshotGroups() + post_with_metadata.return_value = compute.ListInstantSnapshotGroups(), metadata + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + request_type=compute.SetIamPolicyRegionInstantSnapshotGroupRequest, +): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetIamPolicyRegionInstantSnapshotGroupRequest, + dict, + ], +) +def test_set_iam_policy_rest_call_success(request_type): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + "description": "description_value", + "expression": "expression_value", + 
"location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", "members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { + "audit_log_configs": [ + { + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "ignore_child_exemptions": True, + "log_type": "log_type_value", + } + ], + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "service": "service_value", + } + ], + "bindings": {}, + "etag": "etag_value", + "iam_owned": True, + "version": 774, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyRegionInstantSnapshotGroupRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del 
request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstantSnapshotGroupsRestInterceptor(), + ) + client = RegionInstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "post_set_iam_policy" + ) as post, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, "pre_set_iam_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.SetIamPolicyRegionInstantSnapshotGroupRequest.pb( + compute.SetIamPolicyRegionInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Policy.to_json(compute.Policy()) + req.return_value.content = return_value + + request = compute.SetIamPolicyRegionInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata + + client.set_iam_policy( + 
request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + request_type=compute.TestIamPermissionsRegionInstantSnapshotGroupRequest, +): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.TestIamPermissionsRegionInstantSnapshotGroupRequest, + dict, + ], +) +def test_test_iam_permissions_rest_call_success(request_type): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = { + "permissions": ["permissions_value1", "permissions_value2"] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = ( + compute.TestIamPermissionsRegionInstantSnapshotGroupRequest.meta.fields[ + "test_permissions_request_resource" + ] + ) + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove 
fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.RegionInstantSnapshotGroupsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionInstantSnapshotGroupsRestInterceptor(), + ) + client = RegionInstantSnapshotGroupsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "post_test_iam_permissions", + ) as post, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionInstantSnapshotGroupsRestInterceptor, + "pre_test_iam_permissions", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.TestIamPermissionsRegionInstantSnapshotGroupRequest.pb( + compute.TestIamPermissionsRegionInstantSnapshotGroupRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.TestPermissionsResponse.to_json( + compute.TestPermissionsResponse() + ) + req.return_value.content = return_value + + request = compute.TestIamPermissionsRegionInstantSnapshotGroupRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + 
post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata + + client.test_iam_permissions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_client_w_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_unary_empty_call_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete), "__call__") as call: + client.delete_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.DeleteRegionInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_empty_call_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get), "__call__") as call: + client.get(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetRegionInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_iam_policy_empty_call_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetIamPolicyRegionInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_insert_unary_empty_call_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.insert), "__call__") as call: + client.insert_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.InsertRegionInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_empty_call_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list), "__call__") as call: + client.list(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.ListRegionInstantSnapshotGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.SetIamPolicyRegionInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.TestIamPermissionsRegionInstantSnapshotGroupRequest() + + assert args[0] == request_msg + + +def test_region_instant_snapshot_groups_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionInstantSnapshotGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_region_instant_snapshot_groups_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.region_instant_snapshot_groups.transports.RegionInstantSnapshotGroupsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RegionInstantSnapshotGroupsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "delete", + "get", + "get_iam_policy", + "insert", + "list", + "set_iam_policy", + "test_iam_permissions", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_instant_snapshot_groups_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.compute_v1.services.region_instant_snapshot_groups.transports.RegionInstantSnapshotGroupsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstantSnapshotGroupsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_region_instant_snapshot_groups_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.compute_v1.services.region_instant_snapshot_groups.transports.RegionInstantSnapshotGroupsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionInstantSnapshotGroupsTransport() + adc.assert_called_once() + + +def test_region_instant_snapshot_groups_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionInstantSnapshotGroupsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_region_instant_snapshot_groups_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RegionInstantSnapshotGroupsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_instant_snapshot_groups_host_no_port(transport_name): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + 
"transport_name", + [ + "rest", + ], +) +def test_region_instant_snapshot_groups_host_with_port(transport_name): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_instant_snapshot_groups_client_transport_session_collision( + transport_name, +): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionInstantSnapshotGroupsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionInstantSnapshotGroupsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = 
"billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RegionInstantSnapshotGroupsClient.common_billing_account_path( + billing_account + ) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionInstantSnapshotGroupsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstantSnapshotGroupsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RegionInstantSnapshotGroupsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionInstantSnapshotGroupsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstantSnapshotGroupsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RegionInstantSnapshotGroupsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionInstantSnapshotGroupsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstantSnapshotGroupsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = RegionInstantSnapshotGroupsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionInstantSnapshotGroupsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionInstantSnapshotGroupsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RegionInstantSnapshotGroupsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionInstantSnapshotGroupsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionInstantSnapshotGroupsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RegionInstantSnapshotGroupsTransport, "_prep_wrapped_messages" + ) as prep: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RegionInstantSnapshotGroupsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RegionInstantSnapshotGroupsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionInstantSnapshotGroupsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + RegionInstantSnapshotGroupsClient, + transports.RegionInstantSnapshotGroupsRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py index 5e8f32905fd2..7ca9223a26b2 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_instant_snapshots.py @@ -3895,6 +3895,8 @@ def test_get_rest_call_success(request_type): self_link_with_id="self_link_with_id_value", source_disk="source_disk_value", source_disk_id="source_disk_id_value", + source_instant_snapshot_group="source_instant_snapshot_group_value", + source_instant_snapshot_group_id="source_instant_snapshot_group_id_value", status="status_value", zone="zone_value", ) @@ -3928,6 +3930,13 @@ def 
test_get_rest_call_success(request_type): assert response.self_link_with_id == "self_link_with_id_value" assert response.source_disk == "source_disk_value" assert response.source_disk_id == "source_disk_id_value" + assert ( + response.source_instant_snapshot_group == "source_instant_snapshot_group_value" + ) + assert ( + response.source_instant_snapshot_group_id + == "source_instant_snapshot_group_id_value" + ) assert response.status == "status_value" assert response.zone == "zone_value" @@ -4190,6 +4199,8 @@ def test_insert_rest_call_success(request_type): "self_link_with_id": "self_link_with_id_value", "source_disk": "source_disk_value", "source_disk_id": "source_disk_id_value", + "source_instant_snapshot_group": "source_instant_snapshot_group_value", + "source_instant_snapshot_group_id": "source_instant_snapshot_group_id_value", "status": "status_value", "zone": "zone_value", } diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_snapshot_settings.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_snapshot_settings.py new file mode 100644 index 000000000000..af196b3c90c2 --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_snapshot_settings.py @@ -0,0 +1,2705 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.api_core.extended_operation as extended_operation # type: ignore +import google.auth +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account + +from google.cloud.compute_v1.services.region_snapshot_settings import ( + RegionSnapshotSettingsClient, + transports, +) +from google.cloud.compute_v1.types import compute + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth 
anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert RegionSnapshotSettingsClient._get_default_mtls_endpoint(None) is None + assert ( + RegionSnapshotSettingsClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionSnapshotSettingsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionSnapshotSettingsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RegionSnapshotSettingsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + 
assert ( + RegionSnapshotSettingsClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + RegionSnapshotSettingsClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert RegionSnapshotSettingsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RegionSnapshotSettingsClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert RegionSnapshotSettingsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + RegionSnapshotSettingsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert RegionSnapshotSettingsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert RegionSnapshotSettingsClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert RegionSnapshotSettingsClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert RegionSnapshotSettingsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + 
RegionSnapshotSettingsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert RegionSnapshotSettingsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. 
+ # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + RegionSnapshotSettingsClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert RegionSnapshotSettingsClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert ( + RegionSnapshotSettingsClient._use_client_cert_effective() is False + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert RegionSnapshotSettingsClient._get_client_cert_source(None, False) is None + assert ( + RegionSnapshotSettingsClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + RegionSnapshotSettingsClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + RegionSnapshotSettingsClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + RegionSnapshotSettingsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RegionSnapshotSettingsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionSnapshotSettingsClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = RegionSnapshotSettingsClient._DEFAULT_UNIVERSE + default_endpoint = RegionSnapshotSettingsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionSnapshotSettingsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + RegionSnapshotSettingsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == 
api_override + ) + assert ( + RegionSnapshotSettingsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RegionSnapshotSettingsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionSnapshotSettingsClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + RegionSnapshotSettingsClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == RegionSnapshotSettingsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionSnapshotSettingsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RegionSnapshotSettingsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionSnapshotSettingsClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + RegionSnapshotSettingsClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionSnapshotSettingsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RegionSnapshotSettingsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RegionSnapshotSettingsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + RegionSnapshotSettingsClient._get_universe_domain(None, None) + == RegionSnapshotSettingsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RegionSnapshotSettingsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionSnapshotSettingsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionSnapshotSettingsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionSnapshotSettingsClient, "rest"), + ], +) +def test_region_snapshot_settings_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + 
if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RegionSnapshotSettingsRestTransport, "rest"), + ], +) +def test_region_snapshot_settings_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionSnapshotSettingsClient, "rest"), + ], +) +def test_region_snapshot_settings_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_region_snapshot_settings_client_get_transport_class(): + transport = 
RegionSnapshotSettingsClient.get_transport_class() + available_transports = [ + transports.RegionSnapshotSettingsRestTransport, + ] + assert transport in available_transports + + transport = RegionSnapshotSettingsClient.get_transport_class("rest") + assert transport == transports.RegionSnapshotSettingsRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RegionSnapshotSettingsClient, + transports.RegionSnapshotSettingsRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + RegionSnapshotSettingsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionSnapshotSettingsClient), +) +def test_region_snapshot_settings_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionSnapshotSettingsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionSnapshotSettingsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + RegionSnapshotSettingsClient, + transports.RegionSnapshotSettingsRestTransport, + "rest", + "true", + ), + ( + RegionSnapshotSettingsClient, + transports.RegionSnapshotSettingsRestTransport, + "rest", + "false", + ), 
+ ], +) +@mock.patch.object( + RegionSnapshotSettingsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionSnapshotSettingsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_snapshot_settings_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [RegionSnapshotSettingsClient]) +@mock.patch.object( + RegionSnapshotSettingsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RegionSnapshotSettingsClient), +) +def test_region_snapshot_settings_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize("client_class", [RegionSnapshotSettingsClient]) +@mock.patch.object( + RegionSnapshotSettingsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionSnapshotSettingsClient), +) +def test_region_snapshot_settings_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RegionSnapshotSettingsClient._DEFAULT_UNIVERSE + default_endpoint = RegionSnapshotSettingsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionSnapshotSettingsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + RegionSnapshotSettingsClient, + transports.RegionSnapshotSettingsRestTransport, + "rest", + ), + ], +) +def test_region_snapshot_settings_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + RegionSnapshotSettingsClient, + transports.RegionSnapshotSettingsRestTransport, + "rest", + None, + ), + ], +) +def test_region_snapshot_settings_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields(request_type=compute.GetRegionSnapshotSettingRequest): + transport_class = transports.RegionSnapshotSettingsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotSettings() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.SnapshotSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + ) + ) + ) + + +def test_get_rest_flattened(): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SnapshotSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SnapshotSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshotSettings" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionSnapshotSettingRequest(), + project="project_value", + region="region_value", + ) + + +def test_patch_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_patch_rest_required_fields( + request_type=compute.PatchRegionSnapshotSettingRequest, +): + transport_class = transports.RegionSnapshotSettingsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.patch(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_rest_unset_required_fields(): + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "project", + "region", + "snapshotSettingsResource", + ) + ) + ) + + +def test_patch_rest_flattened(): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot_settings_resource=compute.SnapshotSettings( + access_location=compute.SnapshotSettingsAccessLocation( + locations={ + "key_value": compute.SnapshotSettingsAccessLocationAccessLocationPreference( + region="region_value" + ) + } + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.patch(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshotSettings" + % client.transport._host, + args[1], + ) + + +def test_patch_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch( + compute.PatchRegionSnapshotSettingRequest(), + project="project_value", + region="region_value", + snapshot_settings_resource=compute.SnapshotSettings( + access_location=compute.SnapshotSettingsAccessLocation( + locations={ + "key_value": compute.SnapshotSettingsAccessLocationAccessLocationPreference( + region="region_value" + ) + } + ) + ), + ) + + +def test_patch_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.patch in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.patch] = mock_rpc + + request = {} + client.patch_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.patch_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_patch_unary_rest_required_fields( + request_type=compute.PatchRegionSnapshotSettingRequest, +): + transport_class = transports.RegionSnapshotSettingsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).patch._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.patch_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_patch_unary_rest_unset_required_fields(): + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.patch._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set( + ( + "project", + "region", + "snapshotSettingsResource", + ) + ) + ) + + +def test_patch_unary_rest_flattened(): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot_settings_resource=compute.SnapshotSettings( + access_location=compute.SnapshotSettingsAccessLocation( + locations={ + "key_value": compute.SnapshotSettingsAccessLocationAccessLocationPreference( + region="region_value" + ) + } + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.patch_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshotSettings" + % client.transport._host, + args[1], + ) + + +def test_patch_unary_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.patch_unary( + compute.PatchRegionSnapshotSettingRequest(), + project="project_value", + region="region_value", + snapshot_settings_resource=compute.SnapshotSettings( + access_location=compute.SnapshotSettingsAccessLocation( + locations={ + "key_value": compute.SnapshotSettingsAccessLocationAccessLocationPreference( + region="region_value" + ) + } + ) + ), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSnapshotSettingsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionSnapshotSettingsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionSnapshotSettingsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSnapshotSettingsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionSnapshotSettingsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionSnapshotSettingsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = RegionSnapshotSettingsClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_get_rest_bad_request(request_type=compute.GetRegionSnapshotSettingRequest): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetRegionSnapshotSettingRequest, + dict, + ], +) +def test_get_rest_call_success(request_type): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotSettings() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.SnapshotSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.SnapshotSettings) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotSettingsRestInterceptor(), + ) + client = RegionSnapshotSettingsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotSettingsRestInterceptor, "post_get" + ) as post, + mock.patch.object( + transports.RegionSnapshotSettingsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotSettingsRestInterceptor, "pre_get" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetRegionSnapshotSettingRequest.pb( + compute.GetRegionSnapshotSettingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.SnapshotSettings.to_json(compute.SnapshotSettings()) + req.return_value.content = return_value + + request = compute.GetRegionSnapshotSettingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SnapshotSettings() + post_with_metadata.return_value = compute.SnapshotSettings(), metadata + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def 
test_patch_rest_bad_request(request_type=compute.PatchRegionSnapshotSettingRequest): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.patch(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.PatchRegionSnapshotSettingRequest, + dict, + ], +) +def test_patch_rest_call_success(request_type): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["snapshot_settings_resource"] = { + "access_location": {"locations": {}, "policy": "policy_value"}, + "storage_location": {"locations": {}, "policy": "policy_value"}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.PatchRegionSnapshotSettingRequest.meta.fields[ + "snapshot_settings_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "snapshot_settings_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which 
are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["snapshot_settings_resource"][field]) + ): + del request_init["snapshot_settings_resource"][field][i][subfield] + else: + del request_init["snapshot_settings_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = 
response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.patch(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_patch_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotSettingsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotSettingsRestInterceptor(), + ) + client = RegionSnapshotSettingsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotSettingsRestInterceptor, "post_patch" + ) as post, + 
mock.patch.object( + transports.RegionSnapshotSettingsRestInterceptor, "post_patch_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotSettingsRestInterceptor, "pre_patch" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.PatchRegionSnapshotSettingRequest.pb( + compute.PatchRegionSnapshotSettingRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.PatchRegionSnapshotSettingRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.patch( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_client_w_rest(): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_empty_call_rest(): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get), "__call__") as call: + client.get(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetRegionSnapshotSettingRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_patch_unary_empty_call_rest(): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.patch), "__call__") as call: + client.patch_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.PatchRegionSnapshotSettingRequest() + + assert args[0] == request_msg + + +def test_region_snapshot_settings_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionSnapshotSettingsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_region_snapshot_settings_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.region_snapshot_settings.transports.RegionSnapshotSettingsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RegionSnapshotSettingsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "get", + "patch", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_snapshot_settings_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.compute_v1.services.region_snapshot_settings.transports.RegionSnapshotSettingsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionSnapshotSettingsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_region_snapshot_settings_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.compute_v1.services.region_snapshot_settings.transports.RegionSnapshotSettingsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionSnapshotSettingsTransport() + adc.assert_called_once() + + +def test_region_snapshot_settings_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionSnapshotSettingsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_region_snapshot_settings_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RegionSnapshotSettingsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_snapshot_settings_host_no_port(transport_name): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_snapshot_settings_host_with_port(transport_name): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_region_snapshot_settings_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionSnapshotSettingsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionSnapshotSettingsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.patch._session + session2 = client2.transport.patch._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RegionSnapshotSettingsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionSnapshotSettingsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSnapshotSettingsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RegionSnapshotSettingsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionSnapshotSettingsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSnapshotSettingsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RegionSnapshotSettingsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionSnapshotSettingsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSnapshotSettingsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = RegionSnapshotSettingsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionSnapshotSettingsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSnapshotSettingsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RegionSnapshotSettingsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionSnapshotSettingsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSnapshotSettingsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RegionSnapshotSettingsTransport, "_prep_wrapped_messages" + ) as prep: + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RegionSnapshotSettingsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RegionSnapshotSettingsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionSnapshotSettingsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (RegionSnapshotSettingsClient, transports.RegionSnapshotSettingsRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_snapshots.py new file mode 100644 index 000000000000..08cc3cb2b255 --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_snapshots.py @@ -0,0 +1,6508 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.api_core.extended_operation as extended_operation # type: ignore +import google.auth +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account + +from google.cloud.compute_v1.services.region_snapshots import ( + RegionSnapshotsClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + 
"credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert RegionSnapshotsClient._get_default_mtls_endpoint(None) is None + assert ( + RegionSnapshotsClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionSnapshotsClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + RegionSnapshotsClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RegionSnapshotsClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + RegionSnapshotsClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + assert ( + RegionSnapshotsClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert RegionSnapshotsClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RegionSnapshotsClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert RegionSnapshotsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + RegionSnapshotsClient._read_environment_variables() + assert ( + 
str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert RegionSnapshotsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert RegionSnapshotsClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert RegionSnapshotsClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert RegionSnapshotsClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionSnapshotsClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert RegionSnapshotsClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert RegionSnapshotsClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. 
+ # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert RegionSnapshotsClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert RegionSnapshotsClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert RegionSnapshotsClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert RegionSnapshotsClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert RegionSnapshotsClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert RegionSnapshotsClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert RegionSnapshotsClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert RegionSnapshotsClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + RegionSnapshotsClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert RegionSnapshotsClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert RegionSnapshotsClient._use_client_cert_effective() is False + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert RegionSnapshotsClient._get_client_cert_source(None, False) is None + assert ( + RegionSnapshotsClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + RegionSnapshotsClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + RegionSnapshotsClient._get_client_cert_source(None, True) + is 
mock_default_cert_source + ) + assert ( + RegionSnapshotsClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + RegionSnapshotsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionSnapshotsClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = RegionSnapshotsClient._DEFAULT_UNIVERSE + default_endpoint = RegionSnapshotsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionSnapshotsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + RegionSnapshotsClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + RegionSnapshotsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == RegionSnapshotsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionSnapshotsClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + RegionSnapshotsClient._get_api_endpoint(None, None, default_universe, "always") + == RegionSnapshotsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionSnapshotsClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == RegionSnapshotsClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + RegionSnapshotsClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + RegionSnapshotsClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + RegionSnapshotsClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." 
+ ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + RegionSnapshotsClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + RegionSnapshotsClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + RegionSnapshotsClient._get_universe_domain(None, None) + == RegionSnapshotsClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + RegionSnapshotsClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = RegionSnapshotsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = RegionSnapshotsClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + 
+@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionSnapshotsClient, "rest"), + ], +) +def test_region_snapshots_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.RegionSnapshotsRestTransport, "rest"), + ], +) +def test_region_snapshots_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (RegionSnapshotsClient, "rest"), + ], +) +def test_region_snapshots_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + 
"dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_region_snapshots_client_get_transport_class(): + transport = RegionSnapshotsClient.get_transport_class() + available_transports = [ + transports.RegionSnapshotsRestTransport, + ] + assert transport in available_transports + + transport = RegionSnapshotsClient.get_transport_class("rest") + assert transport == transports.RegionSnapshotsRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RegionSnapshotsClient, transports.RegionSnapshotsRestTransport, "rest"), + ], +) +@mock.patch.object( + RegionSnapshotsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionSnapshotsClient), +) +def test_region_snapshots_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(RegionSnapshotsClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(RegionSnapshotsClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + RegionSnapshotsClient, + transports.RegionSnapshotsRestTransport, + "rest", + "true", + ), + ( + RegionSnapshotsClient, + transports.RegionSnapshotsRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( 
+ RegionSnapshotsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionSnapshotsClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_region_snapshots_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [RegionSnapshotsClient]) +@mock.patch.object( + RegionSnapshotsClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(RegionSnapshotsClient), +) +def test_region_snapshots_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize("client_class", [RegionSnapshotsClient]) +@mock.patch.object( + RegionSnapshotsClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(RegionSnapshotsClient), +) +def test_region_snapshots_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = RegionSnapshotsClient._DEFAULT_UNIVERSE + default_endpoint = RegionSnapshotsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = RegionSnapshotsClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (RegionSnapshotsClient, transports.RegionSnapshotsRestTransport, "rest"), + ], +) +def test_region_snapshots_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + (RegionSnapshotsClient, transports.RegionSnapshotsRestTransport, "rest", None), + ], +) +def test_region_snapshots_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rest_required_fields(request_type=compute.DeleteRegionSnapshotRequest): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "snapshot", + ) + ) + ) + + +def test_delete_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "snapshot": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{snapshot}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "snapshot", + ) + ) + ) + + +def test_delete_unary_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "snapshot": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{snapshot}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields(request_type=compute.GetRegionSnapshotRequest): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Snapshot() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "snapshot", + ) + ) + ) + + +def test_get_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Snapshot() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "snapshot": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{snapshot}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot="snapshot_value", + ) + + +def test_get_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_iam_policy] = mock_rpc + + request = {} + client.get_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_iam_policy_rest_required_fields( + request_type=compute.GetIamPolicyRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_iam_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("options_requested_policy_version",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_iam_policy_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("optionsRequestedPolicyVersion",)) + & set( + ( + "project", + "region", + "resource", + ) + ) + ) + + +def test_get_iam_policy_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/getIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_get_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_iam_policy( + compute.GetIamPolicyRegionSnapshotRequest(), + project="project_value", + region="region_value", + resource="resource_value", + ) + + +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_rest_required_fields(request_type=compute.InsertRegionSnapshotRequest): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "snapshotResource", + ) + ) + ) + + +def test_insert_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot_resource=compute.Snapshot(architecture="architecture_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot_resource=compute.Snapshot(architecture="architecture_value"), + ) + + +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "snapshotResource", + ) + ) + ) + + +def test_insert_unary_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot_resource=compute.Snapshot(architecture="architecture_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot_resource=compute.Snapshot(architecture="architecture_value"), + ) + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields(request_type=compute.ListRegionSnapshotsRequest): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotList() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "region", + ) + ) + ) + + +def test_list_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.SnapshotList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "region": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListRegionSnapshotsRequest(), + project="project_value", + region="region_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.SnapshotList( + items=[ + compute.Snapshot(), + compute.Snapshot(), + compute.Snapshot(), + ], + next_page_token="abc", + ), + compute.SnapshotList( + items=[], + next_page_token="def", + ), + compute.SnapshotList( + items=[ + compute.Snapshot(), + ], + next_page_token="ghi", + ), + compute.SnapshotList( + items=[ + compute.Snapshot(), + compute.Snapshot(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.SnapshotList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "region": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.Snapshot) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_set_iam_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_iam_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() 
+ mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_iam_policy] = mock_rpc + + request = {} + client.set_iam_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.set_iam_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_iam_policy_rest_required_fields( + request_type=compute.SetIamPolicyRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_iam_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == 
"resource_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_iam_policy(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_iam_policy_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_iam_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "regionSetPolicyRequestResource", + "resource", + ) + ) + ) + + +def test_set_iam_policy_rest_flattened(): + client = 
RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_iam_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/setIamPolicy" + % client.transport._host, + args[1], + ) + + +def test_set_iam_policy_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_iam_policy( + compute.SetIamPolicyRegionSnapshotRequest(), + project="project_value", + region="region_value", + resource="resource_value", + region_set_policy_request_resource=compute.RegionSetPolicyRequest( + bindings=[compute.Binding(binding_id="binding_id_value")] + ), + ) + + +def test_set_labels_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_labels_rest_required_fields( + request_type=compute.SetLabelsRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_labels(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_labels_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "regionSetLabelsRequestResource", + "resource", + ) + ) + ) + + +def test_set_labels_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + region_set_labels_request_resource=compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_labels(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels( + compute.SetLabelsRegionSnapshotRequest(), + project="project_value", + region="region_value", + resource="resource_value", + region_set_labels_request_resource=compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), + ) + + +def test_set_labels_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.set_labels in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.set_labels] = mock_rpc + + request = {} + client.set_labels_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.set_labels_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_set_labels_unary_rest_required_fields( + request_type=compute.SetLabelsRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_labels._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).set_labels._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.set_labels_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_set_labels_unary_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.set_labels._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "regionSetLabelsRequestResource", + "resource", + ) + ) + ) + + +def test_set_labels_unary_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + region_set_labels_request_resource=compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.set_labels_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/setLabels" + % client.transport._host, + args[1], + ) + + +def test_set_labels_unary_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.set_labels_unary( + compute.SetLabelsRegionSnapshotRequest(), + project="project_value", + region="region_value", + resource="resource_value", + region_set_labels_request_resource=compute.RegionSetLabelsRequest( + label_fingerprint="label_fingerprint_value" + ), + ) + + +def test_test_iam_permissions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_iam_permissions in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.test_iam_permissions] = ( + mock_rpc + ) + + request = {} + client.test_iam_permissions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_iam_permissions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_iam_permissions_rest_required_fields( + request_type=compute.TestIamPermissionsRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["resource"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["resource"] = "resource_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_iam_permissions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "resource" in jsonified_request + assert jsonified_request["resource"] == "resource_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.TestPermissionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_iam_permissions(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_iam_permissions_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_iam_permissions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "region", + "resource", + "testPermissionsRequestResource", + ) + ) + ) + + +def test_test_iam_permissions_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "resource": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.test_iam_permissions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{resource}/testIamPermissions" + % client.transport._host, + args[1], + ) + + +def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.test_iam_permissions( + compute.TestIamPermissionsRegionSnapshotRequest(), + project="project_value", + region="region_value", + resource="resource_value", + test_permissions_request_resource=compute.TestPermissionsRequest( + permissions=["permissions_value"] + ), + ) + + +def test_update_kms_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_rest_required_fields( + request_type=compute.UpdateKmsKeyRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "regionSnapshotUpdateKmsKeyRequestResource", + "snapshot", + ) + ) + ) + + +def test_update_kms_key_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "snapshot": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot="snapshot_value", + region_snapshot_update_kms_key_request_resource=compute.RegionSnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{snapshot}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key( + compute.UpdateKmsKeyRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot="snapshot_value", + region_snapshot_update_kms_key_request_resource=compute.RegionSnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_update_kms_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_unary_rest_required_fields( + request_type=compute.UpdateKmsKeyRegionSnapshotRequest, +): + transport_class = transports.RegionSnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["region"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["region"] = "region_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "region" in jsonified_request + assert jsonified_request["region"] == "region_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_unary_rest_unset_required_fields(): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "region", + "regionSnapshotUpdateKmsKeyRequestResource", + "snapshot", + ) + ) + ) + + +def test_update_kms_key_unary_rest_flattened(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "region": "sample2", + "snapshot": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + region="region_value", + snapshot="snapshot_value", + region_snapshot_update_kms_key_request_resource=compute.RegionSnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/regions/{region}/snapshots/{snapshot}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_unary_rest_flattened_error(transport: str = "rest"): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key_unary( + compute.UpdateKmsKeyRegionSnapshotRequest(), + project="project_value", + region="region_value", + snapshot="snapshot_value", + region_snapshot_update_kms_key_request_resource=compute.RegionSnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSnapshotsClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionSnapshotsClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = RegionSnapshotsClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = RegionSnapshotsClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = RegionSnapshotsClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.RegionSnapshotsRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = RegionSnapshotsClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_delete_rest_bad_request(request_type=compute.DeleteRegionSnapshotRequest): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "snapshot": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteRegionSnapshotRequest, + dict, + ], +) +def test_delete_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "snapshot": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_delete" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_delete_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "pre_delete" + ) as pre, + ): + 
pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.DeleteRegionSnapshotRequest.pb( + compute.DeleteRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.DeleteRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_rest_bad_request(request_type=compute.GetRegionSnapshotRequest): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "snapshot": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetRegionSnapshotRequest, + dict, + ], +) +def test_get_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "snapshot": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Snapshot( + architecture="architecture_value", + auto_created=True, + chain_name="chain_name_value", + creation_size_bytes=2037, + creation_timestamp="creation_timestamp_value", + description="description_value", + disk_size_gb=1261, + download_bytes=1502, + enable_confidential_compute=True, + guest_flush=True, + id=205, + kind="kind_value", + label_fingerprint="label_fingerprint_value", + license_codes=[1360], + licenses=["licenses_value"], + location_hint="location_hint_value", + name="name_value", + region="region_value", + satisfies_pzi=True, + satisfies_pzs=True, + self_link="self_link_value", + snapshot_group_id="snapshot_group_id_value", + snapshot_group_name="snapshot_group_name_value", + snapshot_type="snapshot_type_value", + source_disk="source_disk_value", + source_disk_for_recovery_checkpoint="source_disk_for_recovery_checkpoint_value", + source_disk_id="source_disk_id_value", + source_instant_snapshot="source_instant_snapshot_value", + source_instant_snapshot_id="source_instant_snapshot_id_value", + source_snapshot_schedule_policy="source_snapshot_schedule_policy_value", + source_snapshot_schedule_policy_id="source_snapshot_schedule_policy_id_value", + status="status_value", + storage_bytes=1403, + storage_bytes_status="storage_bytes_status_value", + storage_locations=["storage_locations_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Snapshot.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Snapshot) + assert response.architecture == "architecture_value" + assert response.auto_created is True + assert response.chain_name == "chain_name_value" + assert response.creation_size_bytes == 2037 + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.disk_size_gb == 1261 + assert response.download_bytes == 1502 + assert response.enable_confidential_compute is True + assert response.guest_flush is True + assert response.id == 205 + assert response.kind == "kind_value" + assert response.label_fingerprint == "label_fingerprint_value" + assert response.license_codes == [1360] + assert response.licenses == ["licenses_value"] + assert response.location_hint == "location_hint_value" + assert response.name == "name_value" + assert response.region == "region_value" + assert response.satisfies_pzi is True + assert response.satisfies_pzs is True + assert response.self_link == "self_link_value" + assert response.snapshot_group_id == "snapshot_group_id_value" + assert response.snapshot_group_name == "snapshot_group_name_value" + assert response.snapshot_type == "snapshot_type_value" + assert response.source_disk == "source_disk_value" + assert ( + response.source_disk_for_recovery_checkpoint + == "source_disk_for_recovery_checkpoint_value" + ) + assert response.source_disk_id == "source_disk_id_value" + assert response.source_instant_snapshot == "source_instant_snapshot_value" + assert response.source_instant_snapshot_id == "source_instant_snapshot_id_value" + assert ( + response.source_snapshot_schedule_policy + == "source_snapshot_schedule_policy_value" + ) + assert ( + response.source_snapshot_schedule_policy_id + == "source_snapshot_schedule_policy_id_value" + ) + assert response.status == "status_value" + assert response.storage_bytes == 1403 + assert response.storage_bytes_status == "storage_bytes_status_value" + assert response.storage_locations 
== ["storage_locations_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_get" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, + mock.patch.object(transports.RegionSnapshotsRestInterceptor, "pre_get") as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetRegionSnapshotRequest.pb( + compute.GetRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Snapshot.to_json(compute.Snapshot()) + req.return_value.content = return_value + + request = compute.GetRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Snapshot() + post_with_metadata.return_value = compute.Snapshot(), metadata + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_iam_policy_rest_bad_request( + request_type=compute.GetIamPolicyRegionSnapshotRequest, +): + client = 
RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetIamPolicyRegionSnapshotRequest, + dict, + ], +) +def test_get_iam_policy_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_iam_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_get_iam_policy" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, + "post_get_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "pre_get_iam_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetIamPolicyRegionSnapshotRequest.pb( + compute.GetIamPolicyRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Policy.to_json(compute.Policy()) + req.return_value.content = return_value + + request = compute.GetIamPolicyRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata + + client.get_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_insert_rest_bad_request(request_type=compute.InsertRegionSnapshotRequest): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.insert(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertRegionSnapshotRequest, + dict, + ], +) +def test_insert_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request_init["snapshot_resource"] = { + "architecture": "architecture_value", + "auto_created": True, + "chain_name": "chain_name_value", + "creation_size_bytes": 2037, + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "disk_size_gb": 1261, + "download_bytes": 1502, + "enable_confidential_compute": True, + "guest_flush": True, + "guest_os_features": [{"type_": "type__value"}], + "id": 205, + "kind": "kind_value", + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + "license_codes": [1361, 1362], + "licenses": ["licenses_value1", "licenses_value2"], + "location_hint": "location_hint_value", + "name": "name_value", + "params": {"resource_manager_tags": {}}, + "region": "region_value", + "satisfies_pzi": True, + "satisfies_pzs": True, + "self_link": "self_link_value", + "snapshot_encryption_key": { + "kms_key_name": "kms_key_name_value", + "kms_key_service_account": "kms_key_service_account_value", + "raw_key": "raw_key_value", + "rsa_encrypted_key": "rsa_encrypted_key_value", + "sha256": "sha256_value", + }, + "snapshot_group_id": "snapshot_group_id_value", + 
"snapshot_group_name": "snapshot_group_name_value", + "snapshot_type": "snapshot_type_value", + "source_disk": "source_disk_value", + "source_disk_encryption_key": {}, + "source_disk_for_recovery_checkpoint": "source_disk_for_recovery_checkpoint_value", + "source_disk_id": "source_disk_id_value", + "source_instant_snapshot": "source_instant_snapshot_value", + "source_instant_snapshot_encryption_key": {}, + "source_instant_snapshot_id": "source_instant_snapshot_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", + "status": "status_value", + "storage_bytes": 1403, + "storage_bytes_status": "storage_bytes_status_value", + "storage_locations": ["storage_locations_value1", "storage_locations_value2"], + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertRegionSnapshotRequest.meta.fields["snapshot_resource"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["snapshot_resource"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["snapshot_resource"][field])): + del request_init["snapshot_resource"][field][i][subfield] + 
else: + del request_init["snapshot_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.insert(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_insert" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_insert_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "pre_insert" + ) as pre, + ): + 
pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.InsertRegionSnapshotRequest.pb( + compute.InsertRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.InsertRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_rest_bad_request(request_type=compute.ListRegionSnapshotsRequest): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListRegionSnapshotsRequest, + dict, + ], +) +def test_list_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.SnapshotList( + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.SnapshotList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_list" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, + mock.patch.object(transports.RegionSnapshotsRestInterceptor, "pre_list") as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.ListRegionSnapshotsRequest.pb( + compute.ListRegionSnapshotsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.SnapshotList.to_json(compute.SnapshotList()) + req.return_value.content = return_value + + request = compute.ListRegionSnapshotsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.SnapshotList() + post_with_metadata.return_value = compute.SnapshotList(), metadata + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + 
pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_set_iam_policy_rest_bad_request( + request_type=compute.SetIamPolicyRegionSnapshotRequest, +): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_iam_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetIamPolicyRegionSnapshotRequest, + dict, + ], +) +def test_set_iam_policy_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_policy_request_resource"] = { + "bindings": [ + { + "binding_id": "binding_id_value", + "condition": { + "description": "description_value", + "expression": "expression_value", + "location": "location_value", + "title": "title_value", + }, + "members": ["members_value1", "members_value2"], + "role": "role_value", + } + ], + "etag": "etag_value", + "policy": { + "audit_configs": [ + { + "audit_log_configs": [ + { + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + 
"ignore_child_exemptions": True, + "log_type": "log_type_value", + } + ], + "exempted_members": [ + "exempted_members_value1", + "exempted_members_value2", + ], + "service": "service_value", + } + ], + "bindings": {}, + "etag": "etag_value", + "iam_owned": True, + "version": 774, + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetIamPolicyRegionSnapshotRequest.meta.fields[ + "region_set_policy_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_policy_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True 
+ result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_policy_request_resource"][field]) + ): + del request_init["region_set_policy_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_policy_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Policy( + etag="etag_value", + iam_owned=True, + version=774, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Policy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.set_iam_policy(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.Policy) + assert response.etag == "etag_value" + assert response.iam_owned is True + assert response.version == 774 + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_iam_policy_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_set_iam_policy" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, + "post_set_iam_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "pre_set_iam_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.SetIamPolicyRegionSnapshotRequest.pb( + compute.SetIamPolicyRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Policy.to_json(compute.Policy()) + req.return_value.content = return_value + + request = compute.SetIamPolicyRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Policy() + post_with_metadata.return_value = compute.Policy(), metadata + + client.set_iam_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + 
post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_set_labels_rest_bad_request( + request_type=compute.SetLabelsRegionSnapshotRequest, +): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.set_labels(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.SetLabelsRegionSnapshotRequest, + dict, + ], +) +def test_set_labels_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["region_set_labels_request_resource"] = { + "label_fingerprint": "label_fingerprint_value", + "labels": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.SetLabelsRegionSnapshotRequest.meta.fields[ + "region_set_labels_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_set_labels_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample 
request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["region_set_labels_request_resource"][field]) + ): + del request_init["region_set_labels_request_resource"][field][i][ + subfield + ] + else: + del request_init["region_set_labels_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = 
json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.set_labels(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_set_labels_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + 
transports.RegionSnapshotsRestInterceptor, "post_set_labels" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_set_labels_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "pre_set_labels" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.SetLabelsRegionSnapshotRequest.pb( + compute.SetLabelsRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.SetLabelsRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.set_labels( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_test_iam_permissions_rest_bad_request( + request_type=compute.TestIamPermissionsRegionSnapshotRequest, +): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.test_iam_permissions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.TestIamPermissionsRegionSnapshotRequest, + dict, + ], +) +def test_test_iam_permissions_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "resource": "sample3"} + request_init["test_permissions_request_resource"] = { + "permissions": ["permissions_value1", "permissions_value2"] + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.TestIamPermissionsRegionSnapshotRequest.meta.fields[ + "test_permissions_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "test_permissions_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["test_permissions_request_resource"][field]) + ): + del 
request_init["test_permissions_request_resource"][field][i][ + subfield + ] + else: + del request_init["test_permissions_request_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.TestPermissionsResponse( + permissions=["permissions_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.TestPermissionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.test_iam_permissions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, compute.TestPermissionsResponse) + assert response.permissions == ["permissions_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_test_iam_permissions_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_test_iam_permissions" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, + "post_test_iam_permissions_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "pre_test_iam_permissions" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.TestIamPermissionsRegionSnapshotRequest.pb( + compute.TestIamPermissionsRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.TestPermissionsResponse.to_json( + compute.TestPermissionsResponse() + ) + req.return_value.content = return_value + + request = compute.TestIamPermissionsRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.TestPermissionsResponse() + post_with_metadata.return_value = compute.TestPermissionsResponse(), metadata + + client.test_iam_permissions( + request, + metadata=[ 
+ ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_kms_key_rest_bad_request( + request_type=compute.UpdateKmsKeyRegionSnapshotRequest, +): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "snapshot": "sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_kms_key(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateKmsKeyRegionSnapshotRequest, + dict, + ], +) +def test_update_kms_key_rest_call_success(request_type): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "region": "sample2", "snapshot": "sample3"} + request_init["region_snapshot_update_kms_key_request_resource"] = { + "kms_key_name": "kms_key_name_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateKmsKeyRegionSnapshotRequest.meta.fields[ + "region_snapshot_update_kms_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "region_snapshot_update_kms_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # 
Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + request_init["region_snapshot_update_kms_key_request_resource"][ + field + ] + ), + ): + del request_init["region_snapshot_update_kms_key_request_resource"][ + field + ][i][subfield] + else: + del request_init["region_snapshot_update_kms_key_request_resource"][ + field + ][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_kms_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_kms_key_rest_interceptors(null_interceptor): + transport = transports.RegionSnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.RegionSnapshotsRestInterceptor(), + ) + client = RegionSnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, "post_update_kms_key" + ) as post, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, + "post_update_kms_key_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.RegionSnapshotsRestInterceptor, 
"pre_update_kms_key" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateKmsKeyRegionSnapshotRequest.pb( + compute.UpdateKmsKeyRegionSnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateKmsKeyRegionSnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update_kms_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_client_w_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_unary_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete), "__call__") as call: + client.delete_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.DeleteRegionSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get), "__call__") as call: + client.get(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetRegionSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_iam_policy_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetIamPolicyRegionSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_insert_unary_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.insert), "__call__") as call: + client.insert_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.InsertRegionSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list), "__call__") as call: + client.list(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.ListRegionSnapshotsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_iam_policy_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.SetIamPolicyRegionSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_set_labels_unary_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.set_labels), "__call__") as call: + client.set_labels_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.SetLabelsRegionSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_test_iam_permissions_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.TestIamPermissionsRegionSnapshotRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_kms_key_unary_empty_call_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_kms_key), "__call__") as call: + client.update_kms_key_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateKmsKeyRegionSnapshotRequest() + + assert args[0] == request_msg + + +def test_region_snapshots_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.RegionSnapshotsTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_region_snapshots_base_transport(): + # Instantiate the base transport. 
+ with mock.patch( + "google.cloud.compute_v1.services.region_snapshots.transports.RegionSnapshotsTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.RegionSnapshotsTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. + methods = ( + "delete", + "get", + "get_iam_policy", + "insert", + "list", + "set_iam_policy", + "set_labels", + "test_iam_permissions", + "update_kms_key", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_region_snapshots_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.compute_v1.services.region_snapshots.transports.RegionSnapshotsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionSnapshotsTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_region_snapshots_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.compute_v1.services.region_snapshots.transports.RegionSnapshotsTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.RegionSnapshotsTransport() + adc.assert_called_once() + + +def test_region_snapshots_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + RegionSnapshotsClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_region_snapshots_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.RegionSnapshotsRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_snapshots_host_no_port(transport_name): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_snapshots_host_with_port(transport_name): + client = 
RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_region_snapshots_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = RegionSnapshotsClient( + credentials=creds1, + transport=transport_name, + ) + client2 = RegionSnapshotsClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.get_iam_policy._session + session2 = client2.transport.get_iam_policy._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.set_iam_policy._session + session2 = client2.transport.set_iam_policy._session + assert session1 != session2 + session1 = client1.transport.set_labels._session + session2 = client2.transport.set_labels._session + assert session1 != session2 + session1 = client1.transport.test_iam_permissions._session + session2 = client2.transport.test_iam_permissions._session + assert session1 != session2 + session1 = client1.transport.update_kms_key._session + session2 = client2.transport.update_kms_key._session + assert session1 != session2 + + +def 
test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = RegionSnapshotsClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = RegionSnapshotsClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSnapshotsClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = RegionSnapshotsClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = RegionSnapshotsClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSnapshotsClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = RegionSnapshotsClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = RegionSnapshotsClient.common_organization_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSnapshotsClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = RegionSnapshotsClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = RegionSnapshotsClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = RegionSnapshotsClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = RegionSnapshotsClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = RegionSnapshotsClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = RegionSnapshotsClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.RegionSnapshotsTransport, "_prep_wrapped_messages" + ) as prep: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.RegionSnapshotsTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = RegionSnapshotsClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = RegionSnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (RegionSnapshotsClient, transports.RegionSnapshotsRestTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py index a0f53b154702..c31365d06eef 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_region_url_maps.py @@ -3959,6 +3959,41 @@ def test_insert_rest_call_success(request_type): "error_service": "error_service_value", }, "default_route_action": { + "cache_policy": { + "cache_bypass_request_header_names": [ + "cache_bypass_request_header_names_value1", + "cache_bypass_request_header_names_value2", + ], + "cache_key_policy": { + "excluded_query_parameters": [ + "excluded_query_parameters_value1", + "excluded_query_parameters_value2", + ], + "include_host": True, + 
"include_protocol": True, + "include_query_string": True, + "included_cookie_names": [ + "included_cookie_names_value1", + "included_cookie_names_value2", + ], + "included_header_names": [ + "included_header_names_value1", + "included_header_names_value2", + ], + "included_query_parameters": [ + "included_query_parameters_value1", + "included_query_parameters_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": {"nanos": 543, "seconds": 751}, + "default_ttl": {}, + "max_ttl": {}, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": {}}], + "request_coalescing": True, + "serve_while_stale": {}, + }, "cors_policy": { "allow_credentials": True, "allow_headers": ["allow_headers_value1", "allow_headers_value2"], @@ -3974,10 +4009,7 @@ def test_insert_rest_call_success(request_type): }, "fault_injection_policy": { "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, + "delay": {"fixed_delay": {}, "percentage": 0.10540000000000001}, }, "max_stream_duration": {}, "request_mirror_policy": { @@ -4514,6 +4546,41 @@ def test_patch_rest_call_success(request_type): "error_service": "error_service_value", }, "default_route_action": { + "cache_policy": { + "cache_bypass_request_header_names": [ + "cache_bypass_request_header_names_value1", + "cache_bypass_request_header_names_value2", + ], + "cache_key_policy": { + "excluded_query_parameters": [ + "excluded_query_parameters_value1", + "excluded_query_parameters_value2", + ], + "include_host": True, + "include_protocol": True, + "include_query_string": True, + "included_cookie_names": [ + "included_cookie_names_value1", + "included_cookie_names_value2", + ], + "included_header_names": [ + "included_header_names_value1", + "included_header_names_value2", + ], + "included_query_parameters": [ + "included_query_parameters_value1", + "included_query_parameters_value2", + ], + 
}, + "cache_mode": "cache_mode_value", + "client_ttl": {"nanos": 543, "seconds": 751}, + "default_ttl": {}, + "max_ttl": {}, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": {}}], + "request_coalescing": True, + "serve_while_stale": {}, + }, "cors_policy": { "allow_credentials": True, "allow_headers": ["allow_headers_value1", "allow_headers_value2"], @@ -4529,10 +4596,7 @@ def test_patch_rest_call_success(request_type): }, "fault_injection_policy": { "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, + "delay": {"fixed_delay": {}, "percentage": 0.10540000000000001}, }, "max_stream_duration": {}, "request_mirror_policy": { @@ -4939,6 +5003,41 @@ def test_update_rest_call_success(request_type): "error_service": "error_service_value", }, "default_route_action": { + "cache_policy": { + "cache_bypass_request_header_names": [ + "cache_bypass_request_header_names_value1", + "cache_bypass_request_header_names_value2", + ], + "cache_key_policy": { + "excluded_query_parameters": [ + "excluded_query_parameters_value1", + "excluded_query_parameters_value2", + ], + "include_host": True, + "include_protocol": True, + "include_query_string": True, + "included_cookie_names": [ + "included_cookie_names_value1", + "included_cookie_names_value2", + ], + "included_header_names": [ + "included_header_names_value1", + "included_header_names_value2", + ], + "included_query_parameters": [ + "included_query_parameters_value1", + "included_query_parameters_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": {"nanos": 543, "seconds": 751}, + "default_ttl": {}, + "max_ttl": {}, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": {}}], + "request_coalescing": True, + "serve_while_stale": {}, + }, "cors_policy": { "allow_credentials": True, "allow_headers": ["allow_headers_value1", 
"allow_headers_value2"], @@ -4954,10 +5053,7 @@ def test_update_rest_call_success(request_type): }, "fault_injection_policy": { "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, + "delay": {"fixed_delay": {}, "percentage": 0.10540000000000001}, }, "max_stream_duration": {}, "request_mirror_policy": { @@ -5365,6 +5461,41 @@ def test_validate_rest_call_success(request_type): "error_service": "error_service_value", }, "default_route_action": { + "cache_policy": { + "cache_bypass_request_header_names": [ + "cache_bypass_request_header_names_value1", + "cache_bypass_request_header_names_value2", + ], + "cache_key_policy": { + "excluded_query_parameters": [ + "excluded_query_parameters_value1", + "excluded_query_parameters_value2", + ], + "include_host": True, + "include_protocol": True, + "include_query_string": True, + "included_cookie_names": [ + "included_cookie_names_value1", + "included_cookie_names_value2", + ], + "included_header_names": [ + "included_header_names_value1", + "included_header_names_value2", + ], + "included_query_parameters": [ + "included_query_parameters_value1", + "included_query_parameters_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": {"nanos": 543, "seconds": 751}, + "default_ttl": {}, + "max_ttl": {}, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": {}}], + "request_coalescing": True, + "serve_while_stale": {}, + }, "cors_policy": { "allow_credentials": True, "allow_headers": ["allow_headers_value1", "allow_headers_value2"], @@ -5383,10 +5514,7 @@ def test_validate_rest_call_success(request_type): }, "fault_injection_policy": { "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, + "delay": {"fixed_delay": {}, "percentage": 0.10540000000000001}, }, 
"max_stream_duration": {}, "request_mirror_policy": { diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py index 75cf3999215b..9c7e55bf23d8 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_reservations.py @@ -5117,6 +5117,7 @@ def test_get_rest_call_success(request_type): # Designate an appropriate value for the returned response. return_value = compute.Reservation( commitment="commitment_value", + confidential_compute_type="confidential_compute_type_value", creation_timestamp="creation_timestamp_value", delete_at_time="delete_at_time_value", deployment_type="deployment_type_value", @@ -5151,6 +5152,7 @@ def test_get_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, compute.Reservation) assert response.commitment == "commitment_value" + assert response.confidential_compute_type == "confidential_compute_type_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.delete_at_time == "delete_at_time_value" assert response.deployment_type == "deployment_type_value" @@ -5418,6 +5420,7 @@ def test_insert_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, "delete_at_time": "delete_at_time_value", @@ -6848,6 +6851,7 @@ def test_update_rest_call_success(request_type): "workload_type": "workload_type_value", }, "commitment": "commitment_value", + "confidential_compute_type": "confidential_compute_type_value", "creation_timestamp": "creation_timestamp_value", "delete_after_duration": {"nanos": 543, "seconds": 751}, 
"delete_at_time": "delete_at_time_value", diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py index d9227617b885..423a0253aaa5 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshot_settings_service.py @@ -1534,10 +1534,10 @@ def test_patch_rest_flattened(): mock_args = dict( project="project_value", snapshot_settings_resource=compute.SnapshotSettings( - storage_location=compute.SnapshotSettingsStorageLocationSettings( + access_location=compute.SnapshotSettingsAccessLocation( locations={ - "key_value": compute.SnapshotSettingsStorageLocationSettingsStorageLocationPreference( - name="name_value" + "key_value": compute.SnapshotSettingsAccessLocationAccessLocationPreference( + region="region_value" ) } ) @@ -1581,10 +1581,10 @@ def test_patch_rest_flattened_error(transport: str = "rest"): compute.PatchSnapshotSettingRequest(), project="project_value", snapshot_settings_resource=compute.SnapshotSettings( - storage_location=compute.SnapshotSettingsStorageLocationSettings( + access_location=compute.SnapshotSettingsAccessLocation( locations={ - "key_value": compute.SnapshotSettingsStorageLocationSettingsStorageLocationPreference( - name="name_value" + "key_value": compute.SnapshotSettingsAccessLocationAccessLocationPreference( + region="region_value" ) } ) @@ -1755,10 +1755,10 @@ def test_patch_unary_rest_flattened(): mock_args = dict( project="project_value", snapshot_settings_resource=compute.SnapshotSettings( - storage_location=compute.SnapshotSettingsStorageLocationSettings( + access_location=compute.SnapshotSettingsAccessLocation( locations={ - "key_value": compute.SnapshotSettingsStorageLocationSettingsStorageLocationPreference( - name="name_value" + "key_value": 
compute.SnapshotSettingsAccessLocationAccessLocationPreference( + region="region_value" ) } ) @@ -1802,10 +1802,10 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): compute.PatchSnapshotSettingRequest(), project="project_value", snapshot_settings_resource=compute.SnapshotSettings( - storage_location=compute.SnapshotSettingsStorageLocationSettings( + access_location=compute.SnapshotSettingsAccessLocation( locations={ - "key_value": compute.SnapshotSettingsStorageLocationSettingsStorageLocationPreference( - name="name_value" + "key_value": compute.SnapshotSettingsAccessLocationAccessLocationPreference( + region="region_value" ) } ) @@ -2059,7 +2059,8 @@ def test_patch_rest_call_success(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["snapshot_settings_resource"] = { - "storage_location": {"locations": {}, "policy": "policy_value"} + "access_location": {"locations": {}, "policy": "policy_value"}, + "storage_location": {"locations": {}, "policy": "policy_value"}, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py index 8840e80987a3..3e8595544991 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_snapshots.py @@ -3350,6 +3350,418 @@ def test_test_iam_permissions_rest_flattened_error(transport: str = "rest"): ) +def test_update_kms_key_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_rest_required_fields( + request_type=compute.UpdateKmsKeySnapshotRequest, +): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "snapshot", + "snapshotUpdateKmsKeyRequestResource", + ) + ) + ) + + +def test_update_kms_key_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "snapshot": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + snapshot="snapshot_value", + snapshot_update_kms_key_request_resource=compute.SnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/snapshots/{snapshot}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_kms_key( + compute.UpdateKmsKeySnapshotRequest(), + project="project_value", + snapshot="snapshot_value", + snapshot_update_kms_key_request_resource=compute.SnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + +def test_update_kms_key_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_kms_key in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_kms_key] = mock_rpc + + request = {} + client.update_kms_key_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_kms_key_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_kms_key_unary_rest_required_fields( + request_type=compute.UpdateKmsKeySnapshotRequest, +): + transport_class = transports.SnapshotsRestTransport + + request_init = {} + request_init["project"] = "" + request_init["snapshot"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["snapshot"] = "snapshot_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_kms_key._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "snapshot" in jsonified_request + assert jsonified_request["snapshot"] == "snapshot_value" + + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_kms_key_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_kms_key_unary_rest_unset_required_fields(): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_kms_key._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "snapshot", + "snapshotUpdateKmsKeyRequestResource", + ) + ) + ) + + +def test_update_kms_key_unary_rest_flattened(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "snapshot": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + snapshot="snapshot_value", + snapshot_update_kms_key_request_resource=compute.SnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_kms_key_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/global/snapshots/{snapshot}/updateKmsKey" + % client.transport._host, + args[1], + ) + + +def test_update_kms_key_unary_rest_flattened_error(transport: str = "rest"): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_kms_key_unary( + compute.UpdateKmsKeySnapshotRequest(), + project="project_value", + snapshot="snapshot_value", + snapshot_update_kms_key_request_resource=compute.SnapshotUpdateKmsKeyRequest( + kms_key_name="kms_key_name_value" + ), + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.SnapshotsRestTransport( @@ -3655,9 +4067,12 @@ def test_get_rest_call_success(request_type): licenses=["licenses_value"], location_hint="location_hint_value", name="name_value", + region="region_value", satisfies_pzi=True, satisfies_pzs=True, self_link="self_link_value", + snapshot_group_id="snapshot_group_id_value", + snapshot_group_name="snapshot_group_name_value", snapshot_type="snapshot_type_value", source_disk="source_disk_value", source_disk_for_recovery_checkpoint="source_disk_for_recovery_checkpoint_value", @@ -3703,9 +4118,12 @@ def test_get_rest_call_success(request_type): assert response.licenses == ["licenses_value"] assert response.location_hint == "location_hint_value" assert response.name == "name_value" + assert response.region == "region_value" assert response.satisfies_pzi is True assert response.satisfies_pzs is True assert response.self_link == "self_link_value" + assert response.snapshot_group_id == "snapshot_group_id_value" + assert response.snapshot_group_name == "snapshot_group_name_value" assert response.snapshot_type == "snapshot_type_value" assert response.source_disk == "source_disk_value" assert ( @@ -3976,6 +4394,7 @@ def test_insert_rest_call_success(request_type): "location_hint": "location_hint_value", "name": "name_value", "params": {"resource_manager_tags": {}}, + "region": "region_value", "satisfies_pzi": True, "satisfies_pzs": True, "self_link": "self_link_value", @@ -3986,6 +4405,8 @@ def test_insert_rest_call_success(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_group_id": "snapshot_group_id_value", + "snapshot_group_name": "snapshot_group_name_value", "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, @@ -5020,6 +5441,259 @@ def test_test_iam_permissions_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_update_kms_key_rest_bad_request( + 
request_type=compute.UpdateKmsKeySnapshotRequest, +): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "snapshot": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_kms_key(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateKmsKeySnapshotRequest, + dict, + ], +) +def test_update_kms_key_rest_call_success(request_type): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "snapshot": "sample2"} + request_init["snapshot_update_kms_key_request_resource"] = { + "kms_key_name": "kms_key_name_value" + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateKmsKeySnapshotRequest.meta.fields[ + "snapshot_update_kms_key_request_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. 
+ # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "snapshot_update_kms_key_request_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, + len( + 
request_init["snapshot_update_kms_key_request_resource"][field] + ), + ): + del request_init["snapshot_update_kms_key_request_resource"][field][ + i + ][subfield] + else: + del request_init["snapshot_update_kms_key_request_resource"][field][ + subfield + ] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_kms_key(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_kms_key_rest_interceptors(null_interceptor): + transport = transports.SnapshotsRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.SnapshotsRestInterceptor(), + ) + client = SnapshotsClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.SnapshotsRestInterceptor, "post_update_kms_key" + ) as post, + mock.patch.object( + transports.SnapshotsRestInterceptor, "post_update_kms_key_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.SnapshotsRestInterceptor, "pre_update_kms_key" + ) as pre, + ): + 
pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateKmsKeySnapshotRequest.pb( + compute.UpdateKmsKeySnapshotRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateKmsKeySnapshotRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update_kms_key( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = SnapshotsClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -5189,6 +5863,26 @@ def test_test_iam_permissions_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_kms_key_unary_empty_call_rest(): + client = SnapshotsClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update_kms_key), "__call__") as call: + client.update_kms_key_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateKmsKeySnapshotRequest() + + assert args[0] == request_msg + + def test_snapshots_base_transport_error(): # Passing both a credentials object and credentials_file should raise an error with pytest.raises(core_exceptions.DuplicateCredentialArgs): @@ -5219,6 +5913,7 @@ def test_snapshots_base_transport(): "set_iam_policy", "set_labels", "test_iam_permissions", + "update_kms_key", ) for method in methods: with pytest.raises(NotImplementedError): @@ -5386,6 +6081,9 @@ def test_snapshots_client_transport_session_collision(transport_name): session1 = client1.transport.test_iam_permissions._session session2 = client2.transport.test_iam_permissions._session assert session1 != session2 + session1 = client1.transport.update_kms_key._session + session2 = client2.transport.update_kms_key._session + assert session1 != session2 def test_common_billing_account_path(): diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py index d812da4263e2..2c9f51373bac 100644 --- a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_url_maps.py @@ -4814,6 +4814,41 @@ def test_insert_rest_call_success(request_type): "error_service": "error_service_value", }, "default_route_action": { + "cache_policy": { + "cache_bypass_request_header_names": [ + "cache_bypass_request_header_names_value1", + "cache_bypass_request_header_names_value2", + ], + "cache_key_policy": { + "excluded_query_parameters": [ + "excluded_query_parameters_value1", + "excluded_query_parameters_value2", + ], + "include_host": True, + "include_protocol": True, + "include_query_string": True, + "included_cookie_names": [ + "included_cookie_names_value1", + "included_cookie_names_value2", + ], + "included_header_names": [ + 
"included_header_names_value1", + "included_header_names_value2", + ], + "included_query_parameters": [ + "included_query_parameters_value1", + "included_query_parameters_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": {"nanos": 543, "seconds": 751}, + "default_ttl": {}, + "max_ttl": {}, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": {}}], + "request_coalescing": True, + "serve_while_stale": {}, + }, "cors_policy": { "allow_credentials": True, "allow_headers": ["allow_headers_value1", "allow_headers_value2"], @@ -4829,10 +4864,7 @@ def test_insert_rest_call_success(request_type): }, "fault_injection_policy": { "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, + "delay": {"fixed_delay": {}, "percentage": 0.10540000000000001}, }, "max_stream_duration": {}, "request_mirror_policy": { @@ -5609,6 +5641,41 @@ def test_patch_rest_call_success(request_type): "error_service": "error_service_value", }, "default_route_action": { + "cache_policy": { + "cache_bypass_request_header_names": [ + "cache_bypass_request_header_names_value1", + "cache_bypass_request_header_names_value2", + ], + "cache_key_policy": { + "excluded_query_parameters": [ + "excluded_query_parameters_value1", + "excluded_query_parameters_value2", + ], + "include_host": True, + "include_protocol": True, + "include_query_string": True, + "included_cookie_names": [ + "included_cookie_names_value1", + "included_cookie_names_value2", + ], + "included_header_names": [ + "included_header_names_value1", + "included_header_names_value2", + ], + "included_query_parameters": [ + "included_query_parameters_value1", + "included_query_parameters_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": {"nanos": 543, "seconds": 751}, + "default_ttl": {}, + "max_ttl": {}, + "negative_caching": True, + "negative_caching_policy": [{"code": 
411, "ttl": {}}], + "request_coalescing": True, + "serve_while_stale": {}, + }, "cors_policy": { "allow_credentials": True, "allow_headers": ["allow_headers_value1", "allow_headers_value2"], @@ -5624,10 +5691,7 @@ def test_patch_rest_call_success(request_type): }, "fault_injection_policy": { "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, + "delay": {"fixed_delay": {}, "percentage": 0.10540000000000001}, }, "max_stream_duration": {}, "request_mirror_policy": { @@ -6236,6 +6300,41 @@ def test_update_rest_call_success(request_type): "error_service": "error_service_value", }, "default_route_action": { + "cache_policy": { + "cache_bypass_request_header_names": [ + "cache_bypass_request_header_names_value1", + "cache_bypass_request_header_names_value2", + ], + "cache_key_policy": { + "excluded_query_parameters": [ + "excluded_query_parameters_value1", + "excluded_query_parameters_value2", + ], + "include_host": True, + "include_protocol": True, + "include_query_string": True, + "included_cookie_names": [ + "included_cookie_names_value1", + "included_cookie_names_value2", + ], + "included_header_names": [ + "included_header_names_value1", + "included_header_names_value2", + ], + "included_query_parameters": [ + "included_query_parameters_value1", + "included_query_parameters_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": {"nanos": 543, "seconds": 751}, + "default_ttl": {}, + "max_ttl": {}, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": {}}], + "request_coalescing": True, + "serve_while_stale": {}, + }, "cors_policy": { "allow_credentials": True, "allow_headers": ["allow_headers_value1", "allow_headers_value2"], @@ -6251,10 +6350,7 @@ def test_update_rest_call_success(request_type): }, "fault_injection_policy": { "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - 
"fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, + "delay": {"fixed_delay": {}, "percentage": 0.10540000000000001}, }, "max_stream_duration": {}, "request_mirror_policy": { @@ -6660,6 +6756,41 @@ def test_validate_rest_call_success(request_type): "error_service": "error_service_value", }, "default_route_action": { + "cache_policy": { + "cache_bypass_request_header_names": [ + "cache_bypass_request_header_names_value1", + "cache_bypass_request_header_names_value2", + ], + "cache_key_policy": { + "excluded_query_parameters": [ + "excluded_query_parameters_value1", + "excluded_query_parameters_value2", + ], + "include_host": True, + "include_protocol": True, + "include_query_string": True, + "included_cookie_names": [ + "included_cookie_names_value1", + "included_cookie_names_value2", + ], + "included_header_names": [ + "included_header_names_value1", + "included_header_names_value2", + ], + "included_query_parameters": [ + "included_query_parameters_value1", + "included_query_parameters_value2", + ], + }, + "cache_mode": "cache_mode_value", + "client_ttl": {"nanos": 543, "seconds": 751}, + "default_ttl": {}, + "max_ttl": {}, + "negative_caching": True, + "negative_caching_policy": [{"code": 411, "ttl": {}}], + "request_coalescing": True, + "serve_while_stale": {}, + }, "cors_policy": { "allow_credentials": True, "allow_headers": ["allow_headers_value1", "allow_headers_value2"], @@ -6678,10 +6809,7 @@ def test_validate_rest_call_success(request_type): }, "fault_injection_policy": { "abort": {"http_status": 1219, "percentage": 0.10540000000000001}, - "delay": { - "fixed_delay": {"nanos": 543, "seconds": 751}, - "percentage": 0.10540000000000001, - }, + "delay": {"fixed_delay": {}, "percentage": 0.10540000000000001}, }, "max_stream_duration": {}, "request_mirror_policy": { diff --git a/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_vm_extension_policies.py 
b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_vm_extension_policies.py new file mode 100644 index 000000000000..2d2f0e803a6d --- /dev/null +++ b/packages/google-cloud-compute/tests/unit/gapic/compute_v1/test_zone_vm_extension_policies.py @@ -0,0 +1,4510 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.api_core.extended_operation as extended_operation # type: ignore +import google.auth +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import 
exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.oauth2 import service_account + +from google.cloud.compute_v1.services.zone_vm_extension_policies import ( + ZoneVmExtensionPoliciesClient, + pagers, + transports, +) +from google.cloud.compute_v1.types import compute + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert ZoneVmExtensionPoliciesClient._get_default_mtls_endpoint(None) is None + assert ( + ZoneVmExtensionPoliciesClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + ZoneVmExtensionPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + ZoneVmExtensionPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ZoneVmExtensionPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + ZoneVmExtensionPoliciesClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + ZoneVmExtensionPoliciesClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert ZoneVmExtensionPoliciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ZoneVmExtensionPoliciesClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert ZoneVmExtensionPoliciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with 
pytest.raises(ValueError) as excinfo: + ZoneVmExtensionPoliciesClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert ZoneVmExtensionPoliciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert ZoneVmExtensionPoliciesClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert ZoneVmExtensionPoliciesClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert ZoneVmExtensionPoliciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + ZoneVmExtensionPoliciesClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert ZoneVmExtensionPoliciesClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + ZoneVmExtensionPoliciesClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert ZoneVmExtensionPoliciesClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. 
+ if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert ( + ZoneVmExtensionPoliciesClient._use_client_cert_effective() is False + ) + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert ZoneVmExtensionPoliciesClient._get_client_cert_source(None, False) is None + assert ( + ZoneVmExtensionPoliciesClient._get_client_cert_source( + mock_provided_cert_source, False + ) + is None + ) + assert ( + ZoneVmExtensionPoliciesClient._get_client_cert_source( + mock_provided_cert_source, True + ) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + ZoneVmExtensionPoliciesClient._get_client_cert_source(None, True) + is mock_default_cert_source + ) + assert ( + ZoneVmExtensionPoliciesClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + ZoneVmExtensionPoliciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ZoneVmExtensionPoliciesClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = ZoneVmExtensionPoliciesClient._DEFAULT_UNIVERSE + default_endpoint = ZoneVmExtensionPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ZoneVmExtensionPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + ZoneVmExtensionPoliciesClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + 
) + == api_override + ) + assert ( + ZoneVmExtensionPoliciesClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == ZoneVmExtensionPoliciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ZoneVmExtensionPoliciesClient._get_api_endpoint( + None, None, default_universe, "auto" + ) + == default_endpoint + ) + assert ( + ZoneVmExtensionPoliciesClient._get_api_endpoint( + None, None, default_universe, "always" + ) + == ZoneVmExtensionPoliciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ZoneVmExtensionPoliciesClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == ZoneVmExtensionPoliciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + ZoneVmExtensionPoliciesClient._get_api_endpoint( + None, None, mock_universe, "never" + ) + == mock_endpoint + ) + assert ( + ZoneVmExtensionPoliciesClient._get_api_endpoint( + None, None, default_universe, "never" + ) + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + ZoneVmExtensionPoliciesClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + ZoneVmExtensionPoliciesClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + ZoneVmExtensionPoliciesClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + ZoneVmExtensionPoliciesClient._get_universe_domain(None, None) + == ZoneVmExtensionPoliciesClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + ZoneVmExtensionPoliciesClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." 
+ + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = ZoneVmExtensionPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = ZoneVmExtensionPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ZoneVmExtensionPoliciesClient, "rest"), + ], +) +def test_zone_vm_extension_policies_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 
"compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.ZoneVmExtensionPoliciesRestTransport, "rest"), + ], +) +def test_zone_vm_extension_policies_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (ZoneVmExtensionPoliciesClient, "rest"), + ], +) +def test_zone_vm_extension_policies_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +def test_zone_vm_extension_policies_client_get_transport_class(): + transport = 
ZoneVmExtensionPoliciesClient.get_transport_class() + available_transports = [ + transports.ZoneVmExtensionPoliciesRestTransport, + ] + assert transport in available_transports + + transport = ZoneVmExtensionPoliciesClient.get_transport_class("rest") + assert transport == transports.ZoneVmExtensionPoliciesRestTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ZoneVmExtensionPoliciesClient, + transports.ZoneVmExtensionPoliciesRestTransport, + "rest", + ), + ], +) +@mock.patch.object( + ZoneVmExtensionPoliciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ZoneVmExtensionPoliciesClient), +) +def test_zone_vm_extension_policies_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(ZoneVmExtensionPoliciesClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(ZoneVmExtensionPoliciesClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + ZoneVmExtensionPoliciesClient, + transports.ZoneVmExtensionPoliciesRestTransport, + "rest", + "true", + ), + ( + ZoneVmExtensionPoliciesClient, + transports.ZoneVmExtensionPoliciesRestTransport, + "rest", + "false", + 
), + ], +) +@mock.patch.object( + ZoneVmExtensionPoliciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ZoneVmExtensionPoliciesClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_zone_vm_extension_policies_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ZoneVmExtensionPoliciesClient]) +@mock.patch.object( + ZoneVmExtensionPoliciesClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ZoneVmExtensionPoliciesClient), +) +def test_zone_vm_extension_policies_client_get_mtls_endpoint_and_cert_source( + client_class, +): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize("client_class", [ZoneVmExtensionPoliciesClient]) +@mock.patch.object( + ZoneVmExtensionPoliciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(ZoneVmExtensionPoliciesClient), +) +def test_zone_vm_extension_policies_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = ZoneVmExtensionPoliciesClient._DEFAULT_UNIVERSE + default_endpoint = ZoneVmExtensionPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = ZoneVmExtensionPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + ( + ZoneVmExtensionPoliciesClient, + transports.ZoneVmExtensionPoliciesRestTransport, + "rest", + ), + ], +) +def test_zone_vm_extension_policies_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + ZoneVmExtensionPoliciesClient, + transports.ZoneVmExtensionPoliciesRestTransport, + "rest", + None, + ), + ], +) +def test_zone_vm_extension_policies_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_delete_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_rest_required_fields( + request_type=compute.DeleteZoneVmExtensionPolicyRequest, +): + transport_class = transports.ZoneVmExtensionPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["vm_extension_policy"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["vmExtensionPolicy"] = "vm_extension_policy_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "vmExtensionPolicy" in jsonified_request + assert jsonified_request["vmExtensionPolicy"] == "vm_extension_policy_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_rest_unset_required_fields(): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "vmExtensionPolicy", + "zone", + ) + ) + ) + + +def test_delete_rest_flattened(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies/{vm_extension_policy}" + % client.transport._host, + args[1], + ) + + +def test_delete_rest_flattened_error(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete( + compute.DeleteZoneVmExtensionPolicyRequest(), + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + ) + + +def test_delete_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete] = mock_rpc + + request = {} + client.delete_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_unary_rest_required_fields( + request_type=compute.DeleteZoneVmExtensionPolicyRequest, +): + transport_class = transports.ZoneVmExtensionPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["vm_extension_policy"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["vmExtensionPolicy"] = "vm_extension_policy_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "vmExtensionPolicy" in jsonified_request + assert jsonified_request["vmExtensionPolicy"] == "vm_extension_policy_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_unary_rest_unset_required_fields(): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "vmExtensionPolicy", + "zone", + ) + ) + ) + + +def test_delete_unary_rest_flattened(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies/{vm_extension_policy}" + % client.transport._host, + args[1], + ) + + +def test_delete_unary_rest_flattened_error(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_unary( + compute.DeleteZoneVmExtensionPolicyRequest(), + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + ) + + +def test_get_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get] = mock_rpc + + request = {} + client.get(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_rest_required_fields(request_type=compute.GetZoneVmExtensionPolicyRequest): + transport_class = transports.ZoneVmExtensionPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["vm_extension_policy"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["vmExtensionPolicy"] = "vm_extension_policy_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "vmExtensionPolicy" in jsonified_request + assert jsonified_request["vmExtensionPolicy"] == "vm_extension_policy_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.VmExtensionPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.VmExtensionPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_rest_unset_required_fields(): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "project", + "vmExtensionPolicy", + "zone", + ) + ) + ) + + +def test_get_rest_flattened(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VmExtensionPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.VmExtensionPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies/{vm_extension_policy}" + % client.transport._host, + args[1], + ) + + +def test_get_rest_flattened_error(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get( + compute.GetZoneVmExtensionPolicyRequest(), + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + ) + + +def test_insert_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_rest_required_fields( + request_type=compute.InsertZoneVmExtensionPolicyRequest, +): + transport_class = transports.ZoneVmExtensionPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_rest_unset_required_fields(): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "vmExtensionPolicyResource", + "zone", + ) + ) + ) + + +def test_insert_rest_flattened(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + vm_extension_policy_resource=compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies" + % client.transport._host, + args[1], + ) + + +def test_insert_rest_flattened_error(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert( + compute.InsertZoneVmExtensionPolicyRequest(), + project="project_value", + zone="zone_value", + vm_extension_policy_resource=compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_insert_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.insert in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.insert] = mock_rpc + + request = {} + client.insert_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.insert_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_insert_unary_rest_required_fields( + request_type=compute.InsertZoneVmExtensionPolicyRequest, +): + transport_class = transports.ZoneVmExtensionPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).insert._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.insert_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_insert_unary_rest_unset_required_fields(): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.insert._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "vmExtensionPolicyResource", + "zone", + ) + ) + ) + + +def test_insert_unary_rest_flattened(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + vm_extension_policy_resource=compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.insert_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies" + % client.transport._host, + args[1], + ) + + +def test_insert_unary_rest_flattened_error(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.insert_unary( + compute.InsertZoneVmExtensionPolicyRequest(), + project="project_value", + zone="zone_value", + vm_extension_policy_resource=compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_list_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list] = mock_rpc + + request = {} + client.list(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_rest_required_fields( + request_type=compute.ListZoneVmExtensionPoliciesRequest, +): + transport_class = transports.ZoneVmExtensionPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "max_results", + "order_by", + "page_token", + "return_partial_success", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = compute.VmExtensionPolicyList() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.VmExtensionPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_rest_unset_required_fields(): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "maxResults", + "orderBy", + "pageToken", + "returnPartialSuccess", + ) + ) + & set( + ( + "project", + "zone", + ) + ) + ) + + +def test_list_rest_flattened(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.VmExtensionPolicyList() + + # get arguments that satisfy an http rule for this method + sample_request = {"project": "sample1", "zone": "sample2"} + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.VmExtensionPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_rest_flattened_error(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list( + compute.ListZoneVmExtensionPoliciesRequest(), + project="project_value", + zone="zone_value", + ) + + +def test_list_rest_pager(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + compute.VmExtensionPolicyList( + items=[ + compute.VmExtensionPolicy(), + compute.VmExtensionPolicy(), + compute.VmExtensionPolicy(), + ], + next_page_token="abc", + ), + compute.VmExtensionPolicyList( + items=[], + next_page_token="def", + ), + compute.VmExtensionPolicyList( + items=[ + compute.VmExtensionPolicy(), + ], + next_page_token="ghi", + ), + compute.VmExtensionPolicyList( + items=[ + compute.VmExtensionPolicy(), + compute.VmExtensionPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(compute.VmExtensionPolicyList.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"project": "sample1", "zone": "sample2"} + + pager = client.list(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, compute.VmExtensionPolicy) for i in results) + + pages = list(client.list(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_update_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert 
wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_rest_required_fields( + request_type=compute.UpdateZoneVmExtensionPolicyRequest, +): + transport_class = transports.ZoneVmExtensionPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["vm_extension_policy"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["vmExtensionPolicy"] = "vm_extension_policy_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and 
body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "vmExtensionPolicy" in jsonified_request + assert jsonified_request["vmExtensionPolicy"] == "vm_extension_policy_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_rest_unset_required_fields(): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "vmExtensionPolicy", + "vmExtensionPolicyResource", + "zone", + ) + ) + ) + + +def test_update_rest_flattened(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + vm_extension_policy_resource=compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies/{vm_extension_policy}" + % client.transport._host, + args[1], + ) + + +def test_update_rest_flattened_error(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update( + compute.UpdateZoneVmExtensionPolicyRequest(), + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + vm_extension_policy_resource=compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_update_unary_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update] = mock_rpc + + request = {} + client.update_unary(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_unary(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_unary_rest_required_fields( + request_type=compute.UpdateZoneVmExtensionPolicyRequest, +): + transport_class = transports.ZoneVmExtensionPoliciesRestTransport + + request_init = {} + request_init["project"] = "" + request_init["vm_extension_policy"] = "" + request_init["zone"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["project"] = "project_value" + jsonified_request["vmExtensionPolicy"] = "vm_extension_policy_value" + jsonified_request["zone"] = "zone_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "project" in jsonified_request + assert jsonified_request["project"] == "project_value" + assert "vmExtensionPolicy" in jsonified_request + assert jsonified_request["vmExtensionPolicy"] == "vm_extension_policy_value" + assert "zone" in jsonified_request + assert jsonified_request["zone"] == "zone_value" + + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = compute.Operation() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_unary(request) + + expected_params = [] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_unary_rest_unset_required_fields(): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("requestId",)) + & set( + ( + "project", + "vmExtensionPolicy", + "vmExtensionPolicyResource", + "zone", + ) + ) + ) + + +def test_update_unary_rest_flattened(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation() + + # get arguments that satisfy an http rule for this method + sample_request = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + + # get truthy value for each flattened field + mock_args = dict( + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + vm_extension_policy_resource=compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value" + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_unary(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/compute/v1/projects/{project}/zones/{zone}/vmExtensionPolicies/{vm_extension_policy}" + % client.transport._host, + args[1], + ) + + +def test_update_unary_rest_flattened_error(transport: str = "rest"): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_unary( + compute.UpdateZoneVmExtensionPolicyRequest(), + project="project_value", + zone="zone_value", + vm_extension_policy="vm_extension_policy_value", + vm_extension_policy_resource=compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value" + ), + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZoneVmExtensionPoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ZoneVmExtensionPoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ZoneVmExtensionPoliciesClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ZoneVmExtensionPoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ZoneVmExtensionPoliciesClient(transport=transport) + assert client.transport is transport + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ZoneVmExtensionPoliciesRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_rest(): + transport = ZoneVmExtensionPoliciesClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_delete_rest_bad_request( + request_type=compute.DeleteZoneVmExtensionPolicyRequest, +): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.DeleteZoneVmExtensionPolicyRequest, + dict, + ], +) +def test_delete_rest_call_success(request_type): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_rest_interceptors(null_interceptor): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ZoneVmExtensionPoliciesRestInterceptor(), + ) + client = ZoneVmExtensionPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "post_delete" + ) as post, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, + "post_delete_with_metadata", + ) as post_with_metadata, + mock.patch.object( + 
transports.ZoneVmExtensionPoliciesRestInterceptor, "pre_delete" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.DeleteZoneVmExtensionPolicyRequest.pb( + compute.DeleteZoneVmExtensionPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.DeleteZoneVmExtensionPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.delete( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_rest_bad_request(request_type=compute.GetZoneVmExtensionPolicyRequest): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.GetZoneVmExtensionPolicyRequest, + dict, + ], +) +def test_get_rest_call_success(request_type): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.VmExtensionPolicy( + creation_timestamp="creation_timestamp_value", + description="description_value", + global_resource_link="global_resource_link_value", + id=205, + kind="kind_value", + managed_by_global=True, + name="name_value", + priority=898, + self_link="self_link_value", + self_link_with_id="self_link_with_id_value", + state="state_value", + update_timestamp="update_timestamp_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.VmExtensionPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, compute.VmExtensionPolicy) + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.global_resource_link == "global_resource_link_value" + assert response.id == 205 + assert response.kind == "kind_value" + assert response.managed_by_global is True + assert response.name == "name_value" + assert response.priority == 898 + assert response.self_link == "self_link_value" + assert response.self_link_with_id == "self_link_with_id_value" + assert response.state == "state_value" + assert response.update_timestamp == "update_timestamp_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_rest_interceptors(null_interceptor): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ZoneVmExtensionPoliciesRestInterceptor(), + ) + client = 
ZoneVmExtensionPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "post_get" + ) as post, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "post_get_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "pre_get" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.GetZoneVmExtensionPolicyRequest.pb( + compute.GetZoneVmExtensionPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.VmExtensionPolicy.to_json(compute.VmExtensionPolicy()) + req.return_value.content = return_value + + request = compute.GetZoneVmExtensionPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.VmExtensionPolicy() + post_with_metadata.return_value = compute.VmExtensionPolicy(), metadata + + client.get( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_insert_rest_bad_request( + request_type=compute.InsertZoneVmExtensionPolicyRequest, +): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within 
the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.insert(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.InsertZoneVmExtensionPolicyRequest, + dict, + ], +) +def test_insert_rest_call_success(request_type): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request_init["vm_extension_policy_resource"] = { + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "extension_policies": {}, + "global_resource_link": "global_resource_link_value", + "id": 205, + "instance_selectors": [{"label_selector": {"inclusion_labels": {}}}], + "kind": "kind_value", + "managed_by_global": True, + "name": "name_value", + "priority": 898, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "update_timestamp": "update_timestamp_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.InsertZoneVmExtensionPolicyRequest.meta.fields[ + "vm_extension_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "vm_extension_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request 
which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["vm_extension_policy_resource"][field]) + ): + del request_init["vm_extension_policy_resource"][field][i][subfield] + else: + del request_init["vm_extension_policy_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + 
req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.insert(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_insert_rest_interceptors(null_interceptor): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ZoneVmExtensionPoliciesRestInterceptor(), + ) + client = ZoneVmExtensionPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, 
"post_insert" + ) as post, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, + "post_insert_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "pre_insert" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.InsertZoneVmExtensionPolicyRequest.pb( + compute.InsertZoneVmExtensionPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.InsertZoneVmExtensionPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.insert( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_rest_bad_request(request_type=compute.ListZoneVmExtensionPoliciesRequest): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.ListZoneVmExtensionPoliciesRequest, + dict, + ], +) +def test_list_rest_call_success(request_type): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"project": "sample1", "zone": "sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = compute.VmExtensionPolicyList( + etag="etag_value", + id="id_value", + kind="kind_value", + next_page_token="next_page_token_value", + self_link="self_link_value", + unreachables=["unreachables_value"], + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.VmExtensionPolicyList.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListPager) + assert response.etag == "etag_value" + assert response.id == "id_value" + assert response.kind == "kind_value" + assert response.next_page_token == "next_page_token_value" + assert response.self_link == "self_link_value" + assert response.unreachables == ["unreachables_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_rest_interceptors(null_interceptor): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ZoneVmExtensionPoliciesRestInterceptor(), + ) + client = ZoneVmExtensionPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "post_list" + ) as post, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "post_list_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "pre_list" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.ListZoneVmExtensionPoliciesRequest.pb( + compute.ListZoneVmExtensionPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.VmExtensionPolicyList.to_json( + compute.VmExtensionPolicyList() + ) + req.return_value.content = return_value + + request = compute.ListZoneVmExtensionPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = 
compute.VmExtensionPolicyList() + post_with_metadata.return_value = compute.VmExtensionPolicyList(), metadata + + client.list( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_rest_bad_request( + request_type=compute.UpdateZoneVmExtensionPolicyRequest, +): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update(request) + + +@pytest.mark.parametrize( + "request_type", + [ + compute.UpdateZoneVmExtensionPolicyRequest, + dict, + ], +) +def test_update_rest_call_success(request_type): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "project": "sample1", + "zone": "sample2", + "vm_extension_policy": "sample3", + } + request_init["vm_extension_policy_resource"] = { + "creation_timestamp": "creation_timestamp_value", + "description": "description_value", + "extension_policies": {}, + "global_resource_link": "global_resource_link_value", + "id": 205, + "instance_selectors": [{"label_selector": 
{"inclusion_labels": {}}}], + "kind": "kind_value", + "managed_by_global": True, + "name": "name_value", + "priority": 898, + "self_link": "self_link_value", + "self_link_with_id": "self_link_with_id_value", + "state": "state_value", + "update_timestamp": "update_timestamp_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = compute.UpdateZoneVmExtensionPolicyRequest.meta.fields[ + "vm_extension_policy_resource" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "vm_extension_policy_resource" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = 
value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range( + 0, len(request_init["vm_extension_policy_resource"][field]) + ): + del request_init["vm_extension_policy_resource"][field][i][subfield] + else: + del request_init["vm_extension_policy_resource"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = compute.Operation( + client_operation_id="client_operation_id_value", + creation_timestamp="creation_timestamp_value", + description="description_value", + end_time="end_time_value", + http_error_message="http_error_message_value", + http_error_status_code=2374, + id=205, + insert_time="insert_time_value", + kind="kind_value", + name="name_value", + operation_group_id="operation_group_id_value", + operation_type="operation_type_value", + progress=885, + region="region_value", + self_link="self_link_value", + start_time="start_time_value", + status=compute.Operation.Status.DONE, + status_message="status_message_value", + target_id=947, + target_link="target_link_value", + user="user_value", + zone="zone_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = compute.Operation.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, extended_operation.ExtendedOperation) + assert response.client_operation_id == "client_operation_id_value" + assert response.creation_timestamp == "creation_timestamp_value" + assert response.description == "description_value" + assert response.end_time == "end_time_value" + assert response.http_error_message == "http_error_message_value" + assert response.http_error_status_code == 2374 + assert response.id == 205 + assert response.insert_time == "insert_time_value" + assert response.kind == "kind_value" + assert response.name == "name_value" + assert response.operation_group_id == "operation_group_id_value" + assert response.operation_type == "operation_type_value" + assert response.progress == 885 + assert response.region == "region_value" + assert response.self_link == "self_link_value" + assert response.start_time == "start_time_value" + assert response.status == compute.Operation.Status.DONE + assert response.status_message == "status_message_value" + assert response.target_id == 947 + assert response.target_link == "target_link_value" + assert response.user == "user_value" + assert response.zone == "zone_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_rest_interceptors(null_interceptor): + transport = transports.ZoneVmExtensionPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ZoneVmExtensionPoliciesRestInterceptor(), + ) + client = ZoneVmExtensionPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, "post_update" + ) as post, + mock.patch.object( + transports.ZoneVmExtensionPoliciesRestInterceptor, + "post_update_with_metadata", + ) as post_with_metadata, + mock.patch.object( + 
transports.ZoneVmExtensionPoliciesRestInterceptor, "pre_update" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = compute.UpdateZoneVmExtensionPolicyRequest.pb( + compute.UpdateZoneVmExtensionPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = compute.Operation.to_json(compute.Operation()) + req.return_value.content = return_value + + request = compute.UpdateZoneVmExtensionPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = compute.Operation() + post_with_metadata.return_value = compute.Operation(), metadata + + client.update( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_initialize_client_w_rest(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_unary_empty_call_rest(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete), "__call__") as call: + client.delete_unary(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.DeleteZoneVmExtensionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_empty_call_rest(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get), "__call__") as call: + client.get(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.GetZoneVmExtensionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_insert_unary_empty_call_rest(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.insert), "__call__") as call: + client.insert_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.InsertZoneVmExtensionPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_empty_call_rest(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list), "__call__") as call: + client.list(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.ListZoneVmExtensionPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_unary_empty_call_rest(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.update), "__call__") as call: + client.update_unary(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = compute.UpdateZoneVmExtensionPolicyRequest() + + assert args[0] == request_msg + + +def test_zone_vm_extension_policies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.ZoneVmExtensionPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_zone_vm_extension_policies_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.compute_v1.services.zone_vm_extension_policies.transports.ZoneVmExtensionPoliciesTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.ZoneVmExtensionPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "delete", + "get", + "insert", + "list", + "update", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_zone_vm_extension_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.compute_v1.services.zone_vm_extension_policies.transports.ZoneVmExtensionPoliciesTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ZoneVmExtensionPoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_zone_vm_extension_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.compute_v1.services.zone_vm_extension_policies.transports.ZoneVmExtensionPoliciesTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.ZoneVmExtensionPoliciesTransport() + adc.assert_called_once() + + +def test_zone_vm_extension_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + ZoneVmExtensionPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/compute", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +def test_zone_vm_extension_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.ZoneVmExtensionPoliciesRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_zone_vm_extension_policies_host_no_port(transport_name): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_zone_vm_extension_policies_host_with_port(transport_name): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="compute.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "compute.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://compute.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def test_zone_vm_extension_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = ZoneVmExtensionPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = ZoneVmExtensionPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.delete._session + session2 = client2.transport.delete._session + assert session1 != session2 + session1 = client1.transport.get._session + session2 = client2.transport.get._session + assert session1 != session2 + session1 = client1.transport.insert._session + session2 = client2.transport.insert._session + assert session1 != session2 + session1 = client1.transport.list._session + session2 = client2.transport.list._session + assert session1 != session2 + session1 = client1.transport.update._session + session2 = client2.transport.update._session + assert session1 != session2 + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = ZoneVmExtensionPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = ZoneVmExtensionPoliciesClient.common_billing_account_path(**expected) + + # Check that the 
path construction is reversible. + actual = ZoneVmExtensionPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = ZoneVmExtensionPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = ZoneVmExtensionPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = ZoneVmExtensionPoliciesClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = ZoneVmExtensionPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = ZoneVmExtensionPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = ZoneVmExtensionPoliciesClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = ZoneVmExtensionPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = ZoneVmExtensionPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ZoneVmExtensionPoliciesClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = ZoneVmExtensionPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = ZoneVmExtensionPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = ZoneVmExtensionPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.ZoneVmExtensionPoliciesTransport, "_prep_wrapped_messages" + ) as prep: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.ZoneVmExtensionPoliciesTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = ZoneVmExtensionPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_transport_close_rest(): + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + ] + for transport in transports: + client = ZoneVmExtensionPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # 
Test client calls underlying transport. + with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + ( + ZoneVmExtensionPoliciesClient, + transports.ZoneVmExtensionPoliciesRestTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/backup.py b/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/backup.py index d817e7a72038..3a664f311a63 100644 --- a/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/backup.py +++ b/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/backup.py @@ -40,6 +40,12 @@ class Backup(proto.Message): Format is ``projects/{project}/locations/{location}/backups/{backup}``. + + The location in the name will be the Standard Managed + Multi-Region (SMMR) location (e.g. ``us``) if the backup was + created with an SMMR location, or the Google Managed + Multi-Region (GMMR) location (e.g. ``nam5``) if the backup + was created with a GMMR location. 
database (str): Output only. Name of the Firestore database that the backup is from. diff --git a/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/index.py b/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/index.py index 01d8587f078d..9621a8b7f8a5 100644 --- a/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/index.py +++ b/packages/google-cloud-firestore/google/cloud/firestore_admin_v1/types/index.py @@ -88,6 +88,10 @@ class Index(proto.Message): Optional. Whether it is an unique index. Unique index ensures all values for the indexed field(s) are unique across documents. + search_index_options (google.cloud.firestore_admin_v1.types.Index.SearchIndexOptions): + Optional. Options for search indexes that are at the index + definition level. This field is only currently supported for + indexes with MONGODB_COMPATIBLE_API ApiScope. """ class QueryScope(proto.Enum): @@ -285,6 +289,12 @@ class IndexField(proto.Message): Indicates that this field supports nearest neighbor and distance operations on vector. + This field is a member of `oneof`_ ``value_mode``. + search_config (google.cloud.firestore_admin_v1.types.Index.IndexField.SearchConfig): + Indicates that this field supports search operations. This + field is only currently supported for indexes with + MONGODB_COMPATIBLE_API ApiScope. + This field is a member of `oneof`_ ``value_mode``. """ @@ -356,6 +366,116 @@ class FlatIndex(proto.Message): message="Index.IndexField.VectorConfig.FlatIndex", ) + class SearchConfig(proto.Message): + r"""The configuration for how to index a field for search. + + Attributes: + text_spec (google.cloud.firestore_admin_v1.types.Index.IndexField.SearchConfig.SearchTextSpec): + Optional. The specification for building a + text search index for a field. + geo_spec (google.cloud.firestore_admin_v1.types.Index.IndexField.SearchConfig.SearchGeoSpec): + Optional. The specification for building a + geo search index for a field. 
+ """ + + class TextIndexType(proto.Enum): + r"""Ways to index the text field value. + + Values: + TEXT_INDEX_TYPE_UNSPECIFIED (0): + The index type is unspecified. Not a valid + option. + TOKENIZED (1): + Field values are tokenized. This is the only way currently + supported for MONGODB_COMPATIBLE_API. + """ + + TEXT_INDEX_TYPE_UNSPECIFIED = 0 + TOKENIZED = 1 + + class TextMatchType(proto.Enum): + r"""Types of text matches that are supported for the + field. + + Values: + TEXT_MATCH_TYPE_UNSPECIFIED (0): + The match type is unspecified. Not a valid + option. + MATCH_GLOBALLY (1): + Match on any indexed field. This is the only way currently + supported for MONGODB_COMPATIBLE_API. + """ + + TEXT_MATCH_TYPE_UNSPECIFIED = 0 + MATCH_GLOBALLY = 1 + + class SearchTextIndexSpec(proto.Message): + r"""Specification of how the field should be indexed for search + text indexes. + + Attributes: + index_type (google.cloud.firestore_admin_v1.types.Index.IndexField.SearchConfig.TextIndexType): + Required. How to index the text field value. + match_type (google.cloud.firestore_admin_v1.types.Index.IndexField.SearchConfig.TextMatchType): + Required. How to match the text field value. + """ + + index_type: "Index.IndexField.SearchConfig.TextIndexType" = proto.Field( + proto.ENUM, + number=1, + enum="Index.IndexField.SearchConfig.TextIndexType", + ) + match_type: "Index.IndexField.SearchConfig.TextMatchType" = proto.Field( + proto.ENUM, + number=2, + enum="Index.IndexField.SearchConfig.TextMatchType", + ) + + class SearchTextSpec(proto.Message): + r"""The specification for how to build a text search index for a + field. + + Attributes: + index_specs (MutableSequence[google.cloud.firestore_admin_v1.types.Index.IndexField.SearchConfig.SearchTextIndexSpec]): + Required. Specifications for how the field + should be indexed. Repeated so that the field + can be indexed in multiple ways. 
+ """ + + index_specs: MutableSequence[ + "Index.IndexField.SearchConfig.SearchTextIndexSpec" + ] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Index.IndexField.SearchConfig.SearchTextIndexSpec", + ) + + class SearchGeoSpec(proto.Message): + r"""The specification for how to build a geo search index for a + field. + + Attributes: + geo_json_indexing_disabled (bool): + Optional. Disables geoJSON indexing for the + field. By default, geoJSON points are indexed. + """ + + geo_json_indexing_disabled: bool = proto.Field( + proto.BOOL, + number=1, + ) + + text_spec: "Index.IndexField.SearchConfig.SearchTextSpec" = proto.Field( + proto.MESSAGE, + number=1, + message="Index.IndexField.SearchConfig.SearchTextSpec", + ) + geo_spec: "Index.IndexField.SearchConfig.SearchGeoSpec" = proto.Field( + proto.MESSAGE, + number=2, + message="Index.IndexField.SearchConfig.SearchGeoSpec", + ) + field_path: str = proto.Field( proto.STRING, number=1, @@ -378,6 +498,42 @@ class FlatIndex(proto.Message): oneof="value_mode", message="Index.IndexField.VectorConfig", ) + search_config: "Index.IndexField.SearchConfig" = proto.Field( + proto.MESSAGE, + number=5, + oneof="value_mode", + message="Index.IndexField.SearchConfig", + ) + + class SearchIndexOptions(proto.Message): + r"""Options for search indexes at the definition level. + + Attributes: + text_language (str): + Optional. The language to use for text search indexes. Used + as the default language if not overridden at the document + level by specifying the ``text_language_override_field``. + The language is specified as a BCP 47 language code. For + indexes with MONGODB_COMPATIBLE_API ApiScope: If + unspecified, the default language is English. For indexes + with ``ANY_API`` ApiScope: If unspecified, the default + behavior is autodetect. + text_language_override_field_path (str): + Optional. The field in the document that specifies which + language to use for that specific document. 
For indexes with + MONGODB_COMPATIBLE_API ApiScope: if unspecified, the + language is taken from the "language" field if it exists or + from ``text_language`` if it does not. + """ + + text_language: str = proto.Field( + proto.STRING, + number=1, + ) + text_language_override_field_path: str = proto.Field( + proto.STRING, + number=2, + ) name: str = proto.Field( proto.STRING, @@ -420,6 +576,11 @@ class FlatIndex(proto.Message): proto.BOOL, number=10, ) + search_index_options: SearchIndexOptions = proto.Field( + proto.MESSAGE, + number=9, + message=SearchIndexOptions, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-firestore/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py b/packages/google-cloud-firestore/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py index 899a0e57d3c9..d5b2d86f85d8 100644 --- a/packages/google-cloud-firestore/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py +++ b/packages/google-cloud-firestore/tests/unit/gapic/firestore_admin_v1/test_firestore_admin.py @@ -20071,6 +20071,10 @@ def test_create_index_rest_call_success(request_type): "order": 1, "array_config": 1, "vector_config": {"dimension": 966, "flat": {}}, + "search_config": { + "text_spec": {"index_specs": [{"index_type": 1, "match_type": 1}]}, + "geo_spec": {"geo_json_indexing_disabled": True}, + }, } ], "state": 1, @@ -20078,6 +20082,10 @@ def test_create_index_rest_call_success(request_type): "multikey": True, "shard_count": 1178, "unique": True, + "search_index_options": { + "text_language": "text_language_value", + "text_language_override_field_path": "text_language_override_field_path_value", + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -20818,6 +20826,12 @@ def test_update_field_rest_call_success(request_type): "order": 1, "array_config": 1, "vector_config": {"dimension": 966, "flat": {}}, + "search_config": { + "text_spec": { + "index_specs": [{"index_type": 1, "match_type": 1}] + }, + "geo_spec": {"geo_json_indexing_disabled": True}, + }, } ], "state": 1, @@ -20825,6 +20839,10 @@ def test_update_field_rest_call_success(request_type): "multikey": True, "shard_count": 1178, "unique": True, + "search_index_options": { + "text_language": "text_language_value", + "text_language_override_field_path": "text_language_override_field_path_value", + }, } ], "uses_ancestor_config": True, diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py index cfa6a1051f5e..76a4f4ed1e77 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1/types/memorystore.py @@ -374,6 +374,18 @@ class NodeType(proto.Enum): High memory extra large. STANDARD_SMALL (4): Standard small. + CUSTOM_MICRO (5): + Custom micro. + CUSTOM_MINI (6): + Custom mini. + HIGHCPU_MEDIUM (7): + High cpu medium. + STANDARD_LARGE (8): + Standard large. + HIGHMEM_2XLARGE (9): + High memory 2x large. + CUSTOM_PICO (10): + Custom pico. 
""" NODE_TYPE_UNSPECIFIED = 0 @@ -381,6 +393,12 @@ class NodeType(proto.Enum): HIGHMEM_MEDIUM = 2 HIGHMEM_XLARGE = 3 STANDARD_SMALL = 4 + CUSTOM_MICRO = 5 + CUSTOM_MINI = 6 + HIGHCPU_MEDIUM = 7 + STANDARD_LARGE = 8 + HIGHMEM_2XLARGE = 9 + CUSTOM_PICO = 10 class Mode(proto.Enum): r"""The mode config, which is used to enable/disable cluster diff --git a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py index 5773aba0a9ff..fc9a1e09523c 100644 --- a/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py +++ b/packages/google-cloud-memorystore/google/cloud/memorystore_v1beta/types/memorystore.py @@ -253,6 +253,18 @@ class NodeType(proto.Enum): High memory extra large. STANDARD_SMALL (4): Standard small. + CUSTOM_MICRO (5): + Custom micro. + CUSTOM_MINI (6): + Custom mini. + HIGHCPU_MEDIUM (7): + High cpu medium. + STANDARD_LARGE (8): + Standard large. + HIGHMEM_2XLARGE (9): + High memory 2xlarge. + CUSTOM_PICO (10): + Custom pico. """ NODE_TYPE_UNSPECIFIED = 0 @@ -260,6 +272,12 @@ class NodeType(proto.Enum): HIGHMEM_MEDIUM = 2 HIGHMEM_XLARGE = 3 STANDARD_SMALL = 4 + CUSTOM_MICRO = 5 + CUSTOM_MINI = 6 + HIGHCPU_MEDIUM = 7 + STANDARD_LARGE = 8 + HIGHMEM_2XLARGE = 9 + CUSTOM_PICO = 10 class Mode(proto.Enum): r"""The mode config, which is used to enable/disable cluster diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/cloud_redis_cluster.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/cloud_redis_cluster.py index 71940954dca5..fde2dce5dc4d 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/cloud_redis_cluster.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1/types/cloud_redis_cluster.py @@ -125,6 +125,12 @@ class NodeType(proto.Enum): Redis highmem xlarge node_type. 
REDIS_STANDARD_SMALL (4): Redis standard small node_type. + REDIS_HIGHCPU_MEDIUM (7): + Redis highcpu medium node_type. + REDIS_STANDARD_LARGE (8): + Redis standard large node_type. + REDIS_HIGHMEM_2XLARGE (9): + Redis highmem 2xlarge node_type. """ NODE_TYPE_UNSPECIFIED = 0 @@ -132,6 +138,9 @@ class NodeType(proto.Enum): REDIS_HIGHMEM_MEDIUM = 2 REDIS_HIGHMEM_XLARGE = 3 REDIS_STANDARD_SMALL = 4 + REDIS_HIGHCPU_MEDIUM = 7 + REDIS_STANDARD_LARGE = 8 + REDIS_HIGHMEM_2XLARGE = 9 class TransitEncryptionMode(proto.Enum): diff --git a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/cloud_redis_cluster.py b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/cloud_redis_cluster.py index dd6b41f6193b..a4f99e324cdb 100644 --- a/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/cloud_redis_cluster.py +++ b/packages/google-cloud-redis-cluster/google/cloud/redis_cluster_v1beta1/types/cloud_redis_cluster.py @@ -125,6 +125,12 @@ class NodeType(proto.Enum): Redis highmem xlarge node_type. REDIS_STANDARD_SMALL (4): Redis standard small node_type. + REDIS_HIGHCPU_MEDIUM (7): + Redis highcpu medium node_type. + REDIS_STANDARD_LARGE (8): + Redis standard large node_type. + REDIS_HIGHMEM_2XLARGE (9): + Redis highmem 2xlarge node_type. 
""" NODE_TYPE_UNSPECIFIED = 0 @@ -132,6 +138,9 @@ class NodeType(proto.Enum): REDIS_HIGHMEM_MEDIUM = 2 REDIS_HIGHMEM_XLARGE = 3 REDIS_STANDARD_SMALL = 4 + REDIS_HIGHCPU_MEDIUM = 7 + REDIS_STANDARD_LARGE = 8 + REDIS_HIGHMEM_2XLARGE = 9 class TransitEncryptionMode(proto.Enum): diff --git a/packages/google-cloud-spanner/google/cloud/spanner_v1/types/commit_response.py b/packages/google-cloud-spanner/google/cloud/spanner_v1/types/commit_response.py index 455a57eebc42..7412a5b4ff66 100644 --- a/packages/google-cloud-spanner/google/cloud/spanner_v1/types/commit_response.py +++ b/packages/google-cloud-spanner/google/cloud/spanner_v1/types/commit_response.py @@ -62,6 +62,12 @@ class CommitResponse(proto.Message): to requests that included a ``RoutingHint`` field, but may also be obtained by explicit location-fetching RPCs which may be added in the future. + isolation_level (google.cloud.spanner_v1.types.TransactionOptions.IsolationLevel): + The isolation level used for the read-write + transaction. + read_lock_mode (google.cloud.spanner_v1.types.TransactionOptions.ReadWrite.ReadLockMode): + The read lock mode used for the read-write + transaction. 
""" class CommitStats(proto.Message): @@ -111,6 +117,16 @@ class CommitStats(proto.Message): number=6, message=location.CacheUpdate, ) + isolation_level: transaction.TransactionOptions.IsolationLevel = proto.Field( + proto.ENUM, + number=7, + enum=transaction.TransactionOptions.IsolationLevel, + ) + read_lock_mode: transaction.TransactionOptions.ReadWrite.ReadLockMode = proto.Field( + proto.ENUM, + number=8, + enum=transaction.TransactionOptions.ReadWrite.ReadLockMode, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-spanner/tests/unit/gapic/spanner_v1/test_spanner.py b/packages/google-cloud-spanner/tests/unit/gapic/spanner_v1/test_spanner.py index cbdc96045a7f..9c240cf2a0c2 100644 --- a/packages/google-cloud-spanner/tests/unit/gapic/spanner_v1/test_spanner.py +++ b/packages/google-cloud-spanner/tests/unit/gapic/spanner_v1/test_spanner.py @@ -4716,7 +4716,10 @@ def test_commit(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = commit_response.CommitResponse() + call.return_value = commit_response.CommitResponse( + isolation_level=transaction.TransactionOptions.IsolationLevel.SERIALIZABLE, + read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC, + ) response = client.commit(request) # Establish that the underlying gRPC stub method was called. @@ -4727,6 +4730,14 @@ def test_commit(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, commit_response.CommitResponse) + assert ( + response.isolation_level + == transaction.TransactionOptions.IsolationLevel.SERIALIZABLE + ) + assert ( + response.read_lock_mode + == transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC + ) def test_commit_non_empty_request_with_auto_populated_field(): @@ -4849,7 +4860,10 @@ async def test_commit_async( with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - commit_response.CommitResponse() + commit_response.CommitResponse( + isolation_level=transaction.TransactionOptions.IsolationLevel.SERIALIZABLE, + read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC, + ) ) response = await client.commit(request) @@ -4861,6 +4875,14 @@ async def test_commit_async( # Establish that the response is the type that we expect. assert isinstance(response, commit_response.CommitResponse) + assert ( + response.isolation_level + == transaction.TransactionOptions.IsolationLevel.SERIALIZABLE + ) + assert ( + response.read_lock_mode + == transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC + ) @pytest.mark.asyncio @@ -9749,7 +9771,10 @@ async def test_commit_empty_call_grpc_asyncio(): with mock.patch.object(type(client.transport.commit), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - commit_response.CommitResponse() + commit_response.CommitResponse( + isolation_level=transaction.TransactionOptions.IsolationLevel.SERIALIZABLE, + read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC, + ) ) await client.commit(request=None) @@ -11319,7 +11344,10 @@ def test_commit_rest_call_success(request_type): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = commit_response.CommitResponse() + return_value = commit_response.CommitResponse( + isolation_level=transaction.TransactionOptions.IsolationLevel.SERIALIZABLE, + read_lock_mode=transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() @@ -11335,6 +11363,14 @@ def test_commit_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, commit_response.CommitResponse) + assert ( + response.isolation_level + == transaction.TransactionOptions.IsolationLevel.SERIALIZABLE + ) + assert ( + response.read_lock_mode + == transaction.TransactionOptions.ReadWrite.ReadLockMode.PESSIMISTIC + ) @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py index 4f0c9265575c..c428b7a35630 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/base.py @@ -202,12 +202,30 @@ def _prep_wrapped_messages(self, client_info): ), self.list_bucket_operations: gapic_v1.method.wrap_method( self.list_bucket_operations, - default_timeout=None, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), 
self.get_bucket_operation: gapic_v1.method.wrap_method( self.get_bucket_operation, - default_timeout=None, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.get_location: gapic_v1.method.wrap_method( diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py index 8af085472848..e4aa04840c09 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/services/storage_batch_operations/transports/grpc_asyncio.py @@ -618,12 +618,30 @@ def _prep_wrapped_messages(self, client_info): ), self.list_bucket_operations: self._wrap_method( self.list_bucket_operations, - default_timeout=None, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.get_bucket_operation: self._wrap_method( self.get_bucket_operation, - default_timeout=None, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=2, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, client_info=client_info, ), self.get_location: self._wrap_method( diff --git a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py 
b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py index d38a03ffc415..715b2d879c65 100644 --- a/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py +++ b/packages/google-cloud-storagebatchoperations/google/cloud/storagebatchoperations_v1/types/storage_batch_operations_types.py @@ -947,13 +947,37 @@ class Counters(proto.Message): succeeded_object_count (int): Output only. Number of objects completed. failed_object_count (int): - Output only. Number of objects failed. + Output only. The number of objects that + failed due to user errors or service errors. total_bytes_found (int): Output only. Number of bytes found from source. This field is only populated for jobs with a prefix list object configuration. This field is a member of `oneof`_ ``_total_bytes_found``. + object_custom_contexts_created (int): + Output only. Number of object custom contexts + created. This field is only populated for jobs + with the UpdateObjectCustomContext + transformation. + + This field is a member of `oneof`_ ``_object_custom_contexts_created``. + object_custom_contexts_deleted (int): + Output only. Number of object custom contexts + deleted. This field is only populated for jobs + with the UpdateObjectCustomContext + transformation. + + This field is a member of `oneof`_ ``_object_custom_contexts_deleted``. + object_custom_contexts_updated (int): + Output only. Number of object custom contexts + updated. This counter tracks custom contexts + where the key already existed, but the payload + was modified. This field is only populated for + jobs with the UpdateObjectCustomContext + transformation. + + This field is a member of `oneof`_ ``_object_custom_contexts_updated``. 
""" total_object_count: int = proto.Field( @@ -973,6 +997,21 @@ class Counters(proto.Message): number=4, optional=True, ) + object_custom_contexts_created: int = proto.Field( + proto.INT64, + number=5, + optional=True, + ) + object_custom_contexts_deleted: int = proto.Field( + proto.INT64, + number=6, + optional=True, + ) + object_custom_contexts_updated: int = proto.Field( + proto.INT64, + number=7, + optional=True, + ) class LoggingConfig(proto.Message): diff --git a/packages/google-cloud-storagebatchoperations/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py b/packages/google-cloud-storagebatchoperations/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py index 8d0c29102d0c..49cc554a27d1 100644 --- a/packages/google-cloud-storagebatchoperations/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py +++ b/packages/google-cloud-storagebatchoperations/tests/unit/gapic/storagebatchoperations_v1/test_storage_batch_operations.py @@ -6451,6 +6451,9 @@ def test_create_job_rest_call_success(request_type): "succeeded_object_count": 2307, "failed_object_count": 1987, "total_bytes_found": 1829, + "object_custom_contexts_created": 3199, + "object_custom_contexts_deleted": 3198, + "object_custom_contexts_updated": 3214, }, "error_summaries": [ { diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/__init__.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/__init__.py index c034fb2deb0e..d1e3aa33953f 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/__init__.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports/__init__.py @@ -42,6 +42,7 @@ ReportRow, SearchRequest, SearchResponse, + StoreType, TrafficSource, ) @@ -65,5 +66,6 @@ "ReportRow", "SearchRequest", "SearchResponse", + "StoreType", "TrafficSource", ) diff --git 
a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/__init__.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/__init__.py index 9fae98dbd8fa..d35d4e009303 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/__init__.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/__init__.py @@ -48,6 +48,7 @@ ReportRow, SearchRequest, SearchResponse, + StoreType, TrafficSource, ) @@ -165,5 +166,6 @@ def _get_version(dependency_name): "ReportServiceClient", "SearchRequest", "SearchResponse", + "StoreType", "TrafficSource", ) diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/types/__init__.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/types/__init__.py index 30ac22fb5efe..83681af2dedb 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/types/__init__.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/types/__init__.py @@ -31,6 +31,7 @@ ReportRow, SearchRequest, SearchResponse, + StoreType, TrafficSource, ) @@ -52,5 +53,6 @@ "ReportRow", "SearchRequest", "SearchResponse", + "StoreType", "TrafficSource", ) diff --git a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/types/reports.py b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/types/reports.py index 7c297dc233bd..939b394c890b 100644 --- a/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/types/reports.py +++ b/packages/google-shopping-merchant-reports/google/shopping/merchant_reports_v1/types/reports.py @@ -39,6 +39,7 @@ "CompetitiveVisibilityTopMerchantView", "CompetitiveVisibilityBenchmarkView", "MarketingMethod", + "StoreType", "ReportGranularity", "RelativeDemand", "RelativeDemandChangeType", @@ -62,12 +63,12 @@ class SearchRequest(proto.Message): 
Language guide `__. For the full list of available tables and fields, see the - `Available - fields `__. + [Available + fields][google.shopping.merchant.reports.v1.ReportRow]. page_size (int): Optional. Number of ``ReportRows`` to retrieve in a single - page. Defaults to 1000. Values above 5000 are coerced to - 5000. + page. Defaults to 1000. Values above 100,000 are coerced to + 100,000. page_token (str): Optional. Token of the page to retrieve. If not specified, the first page of results is returned. In order to request @@ -257,6 +258,14 @@ class ProductPerformanceView(proto.Message): special 'ZZ' code is returned. This field is a member of `oneof`_ ``_customer_country_code``. + store_type (google.shopping.merchant_reports_v1.types.StoreType.StoreTypeEnum): + Store type to which metrics apply. Can be ``ONLINE_STORE`` + or ``LOCAL_STORES``. Segment. + + For ``LOCAL_STORES`` store type, further segmentation by a + specific store is not available. + + This field is a member of `oneof`_ ``_store_type``. offer_id (str): Merchant-provided id of the product. Segment. @@ -415,6 +424,12 @@ class ProductPerformanceView(proto.Message): number=4, optional=True, ) + store_type: "StoreType.StoreTypeEnum" = proto.Field( + proto.ENUM, + number=32, + optional=True, + enum="StoreType.StoreTypeEnum", + ) offer_id: str = proto.Field( proto.STRING, number=5, @@ -541,11 +556,13 @@ class ProductView(proto.Message): r"""Fields available for query in ``product_view`` table. Products in the current inventory. Products in this table are the - same as in Products sub-API but not all product attributes from - Products sub-API are available for query in this table. In contrast - to Products sub-API, this table allows to filter the returned list - of products by product attributes. To retrieve a single product by - ``id`` or list all products, Products sub-API should be used. 
+ same as a `Product resource in Products + sub-API `__ + but not all product attributes from Products sub-API are available + for query in this table. In contrast to Products sub-API, this table + allows to filter the returned list of products by product + attributes. To retrieve a single product by ``id`` or list all + products, Products sub-API should be used. Values are only set for fields requested explicitly in the request's search query. @@ -677,9 +694,36 @@ class ProductView(proto.Message): Expiration date for the product, specified on insertion. aggregated_reporting_context_status (google.shopping.merchant_reports_v1.types.ProductView.AggregatedReportingContextStatus): - Aggregated status. + Aggregated status across all reporting contexts. + + Reporting contexts included in the computation of the + aggregated status can be restricted using a filter on the + ``reporting_context`` field. This field is a member of `oneof`_ ``_aggregated_reporting_context_status``. + status_per_reporting_context (MutableSequence[google.shopping.merchant_reports_v1.types.ProductView.StatusPerReportingContext]): + Detailed product status per reporting context. + + Reporting contexts included in this list can be restricted + using a filter on the ``reporting_context`` field. + + Equivalent to + [``ProductStatus.destination_statuses``][google.shopping.merchant.products.v1.ProductStatus] + in Products API. + + **This field cannot be used for sorting or filtering the + results.** + reporting_context (google.shopping.type.types.ReportingContext.ReportingContextEnum): + Reporting context to restrict the query to. + + Restricts the reporting contexts returned in + ``status_per_reporting_context`` and ``item_issues``, and + used to compute ``aggregated_reporting_context_status``. + + **This field can only be used in the ``WHERE`` clause and + cannot be selected in the ``SELECT`` clause.** + + This field is a member of `oneof`_ ``_reporting_context``. 
item_issues (MutableSequence[google.shopping.merchant_reports_v1.types.ProductView.ItemIssue]): List of item issues for the product. @@ -692,10 +736,10 @@ class ProductView(proto.Message): Estimated performance potential compared to highest performing products of the merchant. click_potential_rank (int): - Rank of the product based on its click potential. A product - with ``click_potential_rank`` 1 has the highest click - potential among the merchant's products that fulfill the - search query conditions. + Normalized click potential of the product. + Values range from 1 to 1000, where 1 is the + highest click potential and 1000 is the + theoretical lowest. This field is a member of `oneof`_ ``_click_potential_rank``. """ @@ -703,6 +747,10 @@ class ProductView(proto.Message): class AggregatedReportingContextStatus(proto.Enum): r"""Status of the product aggregated for all reporting contexts. + Reporting contexts included in the computation of the aggregated + status can be restricted using a filter on the ``reporting_context`` + field. + Here's an example of how the aggregated status is computed: ``` @@ -718,16 +766,16 @@ class AggregatedReportingContextStatus(proto.Enum): Not specified. NOT_ELIGIBLE_OR_DISAPPROVED (1): Product is not eligible or is disapproved for - all reporting contexts. + all reporting contexts and countries. PENDING (2): Product's status is pending in all reporting - contexts. + contexts and countries. ELIGIBLE_LIMITED (3): Product is eligible for some (but not all) - reporting contexts. + reporting contexts and countries. ELIGIBLE (4): Product is eligible for all reporting - contexts. + contexts and countries. """ AGGREGATED_REPORTING_CONTEXT_STATUS_UNSPECIFIED = 0 @@ -840,10 +888,17 @@ class ItemIssueSeverity(proto.Message): Attributes: severity_per_reporting_context (MutableSequence[google.shopping.merchant_reports_v1.types.ProductView.ItemIssue.ItemIssueSeverity.IssueSeverityPerReportingContext]): Issue severity per reporting context. 
+ + Reporting contexts included in this list can be restricted + using a filter on the ``reporting_context`` field. aggregated_severity (google.shopping.merchant_reports_v1.types.ProductView.ItemIssue.ItemIssueSeverity.AggregatedIssueSeverity): Aggregated severity of the issue for all reporting contexts it affects. + Reporting contexts included in the computation of the + aggregated severity can be restricted using a filter on the + ``reporting_context`` field. + **This field can be used for filtering the results.** This field is a member of `oneof`_ ``_aggregated_severity``. @@ -937,6 +992,57 @@ class IssueSeverityPerReportingContext(proto.Message): enum="ProductView.ItemIssue.ItemIssueResolution", ) + class StatusPerReportingContext(proto.Message): + r"""Status of the product for a specific reporting context. + + Equivalent to + [``DestinationStatus``][google.shopping.merchant.products.v1.ProductStatus.DestinationStatus] + in Products API. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + reporting_context (google.shopping.type.types.ReportingContext.ReportingContextEnum): + Reporting context the status applies to. + + This field is a member of `oneof`_ ``_reporting_context``. + approved_countries (MutableSequence[str]): + List of approved countries in the reporting context, + represented in `ISO + 3166 `__ + format, for example, ``US``. + disapproved_countries (MutableSequence[str]): + List of disapproved countries in the reporting context, + represented in `ISO + 3166 `__ + format, for example, ``US``. + pending_countries (MutableSequence[str]): + List of pending countries in the reporting context, + represented in `ISO + 3166 `__ + format, for example, ``US``. 
+ """ + + reporting_context: types.ReportingContext.ReportingContextEnum = proto.Field( + proto.ENUM, + number=1, + optional=True, + enum=types.ReportingContext.ReportingContextEnum, + ) + approved_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + disapproved_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + pending_countries: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + id: str = proto.Field( proto.STRING, number=1, @@ -1073,6 +1179,19 @@ class IssueSeverityPerReportingContext(proto.Message): optional=True, enum=AggregatedReportingContextStatus, ) + status_per_reporting_context: MutableSequence[StatusPerReportingContext] = ( + proto.RepeatedField( + proto.MESSAGE, + number=32, + message=StatusPerReportingContext, + ) + ) + reporting_context: types.ReportingContext.ReportingContextEnum = proto.Field( + proto.ENUM, + number=33, + optional=True, + enum=types.ReportingContext.ReportingContextEnum, + ) item_issues: MutableSequence[ItemIssue] = proto.RepeatedField( proto.MESSAGE, number=27, @@ -2440,6 +2559,26 @@ class MarketingMethodEnum(proto.Enum): ADS = 2 +class StoreType(proto.Message): + r"""Store where the product is sold (online versus local stores).""" + + class StoreTypeEnum(proto.Enum): + r"""Store types. + + Values: + STORE_TYPE_ENUM_UNSPECIFIED (0): + Not specified. + ONLINE_STORE (1): + Online store. + LOCAL_STORES (2): + Local (physical) stores. + """ + + STORE_TYPE_ENUM_UNSPECIFIED = 0 + ONLINE_STORE = 1 + LOCAL_STORES = 2 + + class ReportGranularity(proto.Message): r"""Granularity of the Best sellers report. Best sellers reports are computed over a week and a month timeframe.